import sys
import os
import math
from time import time

import numpy as np
import tensorflow
import keras
from keras.optimizers import SGD
from keras.optimizers import Adam
from keras.src.legacy.preprocessing.image import ImageDataGenerator
from keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import TensorBoard
from tensorflow.keras.callbacks import EarlyStopping

# NOTE(review): wildcard import kept because the script relies on at least
# `resnet50` from this module; prefer an explicit import list once the
# module's public names are confirmed.
from resnet50 import *

# Notes: 10/23/2024 Achieved 53.85%
# Setup notes:
#   py -mpip install numpy
#   py -mpip install tensorflow
#   py -mpip install matplotlib
#   (packages land in
#    c:\users\skess\appdata\local\programs\python\python39\lib\site-packages)

# TensorBoard callback with a unique per-run log directory.
# Before training, run in a terminal: tensorboard --logdir=logs/
tensorboard = TensorBoard(log_dir='logs/{}'.format(time()))

# RESNET50 ARCHITECTURE
image_size = (128, 128)
model_name = 'resnet50_20241022.h5.keras'

# 80/20 train/validation split drawn from the same directory; the shared
# `seed` keeps the two subsets disjoint and reproducible across runs.
train_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    'C:\\boneyard\\DeepLearning\\data',
    label_mode="binary",
    subset="training",
    validation_split=.20,
    image_size=image_size,
    color_mode='grayscale',
    batch_size=32,
    seed=50
)

val_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    'C:\\boneyard\\DeepLearning\\data',
    label_mode="binary",
    subset="validation",
    validation_split=.20,
    image_size=image_size,
    color_mode='grayscale',
    batch_size=32,
    seed=50
)

# Hyperparameters
input_shape = (128, 128, 1)  # grayscale 128x128, matching the datasets above
epochs = 200
initial_lrate = 0.01
# NOTE(review): the datasets above are already batched at 32; tf.data inputs
# carry their own batching, so this value has no effect on model.fit().
batch_size = 128
patience_on_early_stop = 10
metric = 'accuracy'

# Scale the learning rate by sqrt(0.1) (~0.316) when val_loss plateaus.
reduce_lr = tensorflow.keras.callbacks.ReduceLROnPlateau(
    monitor='val_loss',
    factor=np.sqrt(0.1),
    patience=5,
    min_lr=0.5e-6,
)

# Optimizer
optimizer = SGD(learning_rate=initial_lrate, momentum=0.9, nesterov=False)
# Checkpointer: persist the best model seen so far. Monitor the *validation*
# metric — the original monitored training accuracy, which rewards
# overfitting; mode='max' because higher accuracy is better.
checkpointer = ModelCheckpoint(
    filepath=model_name,
    monitor='val_' + metric,
    mode='max',
    verbose=1,
    save_best_only=True,
)

# Stop training once val_loss has not improved for `patience_on_early_stop`
# epochs; consistent with ReduceLROnPlateau, which watches the same quantity.
early_stopping = EarlyStopping(
    monitor='val_loss',
    mode='min',
    verbose=1,
    patience=patience_on_early_stop,
)

# Build and compile: binary classification (single sigmoid-style output unit).
model = resnet50(input_shape, 1)
model.summary()
model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])

# `batch_size` must not be passed alongside tf.data.Dataset inputs (the
# datasets are already batched at 32), so it is omitted here.
history = model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=epochs,
    callbacks=[tensorboard, reduce_lr, early_stopping, checkpointer],
)

# No explicit model.save(): ModelCheckpoint above already persists the best
# weights, and early stopping may end training before the final epoch.
# model.save(model_name)

# Learning-curve plot (enable once matplotlib's pyplot is imported):
# pyplot.plot(history.history['accuracy'], label='train')
# pyplot.plot(history.history['val_accuracy'], label='validation')
# pyplot.legend()
# pyplot.show()