Updates
This commit is contained in:
@@ -94,10 +94,10 @@ model.compile(loss='binary_crossentropy',optimizer=optimizer,metrics=['accuracy'
|
||||
# Fit the model; every callback (TensorBoard logging, best-weights
# checkpointing, early stopping) is applied during this single run.
history = model.fit(
    train_ds,
    epochs=epochs,
    validation_data=val_ds,
    callbacks=[tensorboard, checkpointer, early_stopping],
)

# Plot the learning curves: training vs. validation accuracy per epoch.
for series, label in (("accuracy", "train"), ("val_accuracy", "validation")):
    pyplot.plot(history.history[series], label=label)
pyplot.legend()
pyplot.show()
||||
@@ -58,6 +58,7 @@ base_filters=32
|
||||
# --- Training hyperparameters ---
learning_rate = .00001          # Adam step size
batch_size = 256                # samples per gradient update
epochs = 200                    # upper bound; early stopping may end sooner
patience_on_early_stop = 5      # epochs without improvement before stopping

# TensorBoard callback writing to a timestamped log directory.
# Before training, run in a terminal: tensorboard --logdir=logs/
tensorboard = TensorBoard(log_dir="logs/" + str(time()))
||||
@@ -156,26 +157,21 @@ model.summary()
|
||||
# Checkpoint callback: save the weights to `model_weights_file`, but only
# when the monitored validation metric improves (save_best_only).
checkpointer = ModelCheckpoint(filepath=model_weights_file, verbose=1,
                               save_best_only=True)

# Early stopping: halt training once val_loss has stopped decreasing for
# `patience_on_early_stop` consecutive epochs.
early_stopping = EarlyStopping(monitor='val_loss', mode='min', verbose=1,
                               patience=patience_on_early_stop)

# Adam optimizer with the configured learning rate (standard moment defaults).
optimizer = keras.optimizers.Adam(learning_rate=learning_rate, beta_1=0.9,
                                  beta_2=0.999, epsilon=1e-8)

# Binary cross-entropy loss: this is a two-class classification model.
model.compile(loss='binary_crossentropy', optimizer=optimizer,
              metrics=['accuracy'])

# Train ONCE with all callbacks attached.
# BUG FIX: the original called model.fit twice back to back — the first call
# trained the full `epochs` with no early stopping and its history was
# discarded, and the second call referenced an undefined name `earlystopping`
# (the callback defined above is `early_stopping`), which raises NameError.
history = model.fit(train_ds, epochs=epochs, validation_data=val_ds,
                    batch_size=batch_size, verbose=2,
                    callbacks=[early_stopping, tensorboard, checkpointer])

# Here is the evaluation part (kept disabled, as in the original):
# score = model.evaluate(x=train_ds, verbose=1)
@@ -200,16 +196,3 @@ history=model.fit(train_ds, epochs=epochs, validation_data=val_ds,batch_size=bat
|
||||
# pyplot.plot(history.history['val_accuracy'],label='validation')
|
||||
# pyplot.legend()
|
||||
# pyplot.show()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user