import tensorflow as tf
import wandb
from wandb.keras import WandbCallback

# Hyperparameters logged to wandb and read back through wandb.config.
specific_config = {
    'learning_rate': 0.0001,
    'batch_size': 32,
    'epochs': 50,
    'layer_1': 256,
    'layer_2': 256,
    'layer_3': 128,
    'layer_4': 128,
    'nesterov': True,
    'optimizer': 'adam',
    'activation': 'relu',
    'dropout': 0.3,
    'layer_multiplier': 1
}
def get_activation(conf):
    # Map the activation name from the config to the matching TF function.
    name = conf.activation
    if name == 'relu':
        return tf.nn.relu
    elif name == 'elu':
        return tf.nn.elu
    elif name == 'tanh':
        return tf.nn.tanh
def get_optimizer(config):
    # Build the optimizer named in the config with the configured hyperparameters.
    if config.optimizer == 'adam':
        opt = tf.keras.optimizers.Adam(learning_rate=config.learning_rate)
    elif config.optimizer == 'sgd':
        # Note: this branch also expects a 'momentum' entry in the config.
        opt = tf.keras.optimizers.SGD(
            learning_rate=config.learning_rate,
            momentum=config.momentum,
            nesterov=config.nesterov
        )
    return opt
run = wandb.init(project='icr-competition', config=specific_config)
config = wandb.config
model = tf.keras.Sequential()
model.add(tf.keras.layers.BatchNormalization())
for i in range(1, 5):
    nodes = config['layer_' + str(i)] * config.layer_multiplier
    model.add(tf.keras.layers.Dense(nodes, activation=get_activation(config)))
    model.add(tf.keras.layers.Dropout(config.dropout))
# A single-unit softmax always outputs 1.0; use sigmoid for a binary output.
model.add(tf.keras.layers.Dense(1, activation=tf.nn.sigmoid))
model.compile(
    optimizer=get_optimizer(config),
    loss=tf.keras.losses.BinaryCrossentropy(),
    metrics=['accuracy']
)
history = model.fit(
    inputs, output,
    epochs=config.epochs,
    batch_size=config.batch_size,
    shuffle=True,
    validation_split=0.2,
    callbacks=[WandbCallback()]
)
I am getting the following error when I run the cell for model.fit(). It would be very helpful if someone could tell me what I am doing wrong here.
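As a sanity check, here is a minimal self-contained sketch of the same pipeline on made-up data (the array shapes and values below are placeholders, not the real dataset). Running something like this in isolation should help narrow down whether the problem is in the model itself or in how inputs and output are prepared:

import numpy as np
import tensorflow as tf

# Placeholder data: 1000 samples with 20 features and a binary label.
rng = np.random.default_rng(0)
inputs = rng.normal(size=(1000, 20)).astype('float32')
output = rng.integers(0, 2, size=(1000, 1)).astype('float32')

model = tf.keras.Sequential([
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.Dense(256, activation='relu'),
    tf.keras.layers.Dropout(0.3),
    tf.keras.layers.Dense(1, activation='sigmoid')  # sigmoid for a single binary output
])
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.0001),
    loss=tf.keras.losses.BinaryCrossentropy(),
    metrics=['accuracy']
)
model.fit(inputs, output, epochs=2, batch_size=32, validation_split=0.2)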