"""GoogLeNet / Inception-v1 binary-classifier training script.

Builds the Inception architecture with one main sigmoid output and two
auxiliary classifier heads, then trains it on grayscale 128x128 images
loaded from a directory tree.

Before training, in a terminal, run:  tensorboard --logdir=logs/
"""
import math
import os
import sys
from time import time

import keras
import numpy as np
import tensorflow
from keras.callbacks import ModelCheckpoint
from keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPool2D
from keras.models import Model
from keras.optimizers import SGD, Adam
from keras.src.legacy.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import EarlyStopping, TensorBoard

# Provides inception_module(); originally also the only source of Conv2D,
# which is now imported explicitly above for robustness.
from inception_module import *

# INCEPTION ARCHITECTURE -----------------------------------------------------

# --- Configuration ---
image_size = (128, 128)
model_name = 'inception.h5.keras'
input_shape = (128, 128, 1)
epochs = 125
initial_lrate = 0.01
batch_size = 256  # NOTE(review): unused by fit() — the datasets are pre-batched at 32.
patience_on_early_stop = 10
# Checkpoint on the *validation* accuracy of the second auxiliary head.
# (Originally this monitored the training metric 'auxilliary_output_2_accuracy',
# which makes save_best_only track overfitting rather than generalization; the
# plotting code at the bottom confirms the validation metric name.)
metric = 'val_auxilliary_output_2_accuracy'

# --- Data loading ---
train_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    'C:\\boneyard\\DeepLearning\\data',
    label_mode="binary",
    subset="training",
    validation_split=.20,
    image_size=image_size,
    color_mode='grayscale',
    batch_size=32,
    seed=50,
)

val_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    'C:\\boneyard\\DeepLearning\\data',
    label_mode="binary",
    subset="validation",
    validation_split=.20,
    image_size=image_size,
    color_mode='grayscale',
    batch_size=32,
    seed=50,
)


def _replicate_labels(images, labels):
    """Duplicate the single binary label for each of the model's three outputs."""
    return images, (labels, labels, labels)


# The model below has three outputs (main head + two auxiliary heads), so
# fit() needs three target tensors per batch; the directory loader yields one.
train_ds = train_ds.map(_replicate_labels)
val_ds = val_ds.map(_replicate_labels)


def decay(epoch, steps=100):
    """Step-decay learning-rate schedule: start at 0.01, x0.96 every 8 epochs.

    Args:
        epoch: 0-based epoch index supplied by LearningRateScheduler.
        steps: unused; kept for backward compatibility with the original signature.

    Returns:
        The learning rate to use for this epoch.
    """
    initial_lrate = .01
    drop = .96
    epochs_drop = 8
    lrate = initial_lrate * math.pow(drop, math.floor((1 + epoch) / epochs_drop))
    return lrate


lr_schedule = keras.callbacks.LearningRateScheduler(decay, verbose=1)

# TensorBoard instance pointed at a unique per-run log directory.
tensorboard = TensorBoard(log_dir='logs/{}'.format(time()))

# OPTIMIZER
optimizer = SGD(learning_rate=initial_lrate, momentum=0.9, nesterov=False)

# INPUT LAYER
input_layer = keras.layers.Input(shape=input_shape)

kernel_init = keras.initializers.glorot_uniform()
bias_init = keras.initializers.Constant(value=.2)

# --- Stem: conv / pool layers before the first inception module ---
x = Conv2D(64, (7, 7), padding='same', strides=(2, 2), activation='relu',
           name='conv_1_7x7_2',
           kernel_initializer=kernel_init, bias_initializer=bias_init)(input_layer)
x = MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_1_3x3_2')(x)
x = Conv2D(192, (3, 3), padding='same', strides=(1, 1), activation='relu',
           name='conv_2b_3x3_1')(x)
x = MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_2_3x3_2')(x)

# BUILD THE INCEPTION MODULE AND MAX POOLING LAYERS (3a and 3b)
x = inception_module(x, filters_1x1=64, filters_3x3_reduce=96, filters_3x3=128,
                     filters_5x5_reduce=16, filters_5x5=32, filters_pool_proj=32,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_3a')
x = inception_module(x, filters_1x1=128, filters_3x3_reduce=128, filters_3x3=192,
                     filters_5x5_reduce=32, filters_5x5=96, filters_pool_proj=64,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_3b')
x = MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_3_3x3_2')(x)

# BUILD THE INCEPTION MODULE AND MAX POOLING LAYERS (4a)
x = inception_module(x, filters_1x1=192, filters_3x3_reduce=96, filters_3x3=208,
                     filters_5x5_reduce=16, filters_5x5=48, filters_pool_proj=64,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_4a')

# BUILD THE CLASSIFIERS — first auxiliary head, branched off after 4a
classifier_1 = keras.layers.AveragePooling2D((5, 5), strides=3)(x)
classifier_1 = Conv2D(128, (1, 1), padding='same', activation='relu')(classifier_1)
classifier_1 = Flatten()(classifier_1)
classifier_1 = Dense(1024, activation='relu')(classifier_1)
classifier_1 = Dropout(0.7)(classifier_1)
classifier_1 = Dense(1, activation='sigmoid', name='auxilliary_output_1')(classifier_1)

# BUILD THE INCEPTION MODULE AND MAX POOLING LAYERS (4b, 4c, 4d)
x = inception_module(x, filters_1x1=160, filters_3x3_reduce=112, filters_3x3=224,
                     filters_5x5_reduce=24, filters_5x5=64, filters_pool_proj=64,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_4b')
x = inception_module(x, filters_1x1=128, filters_3x3_reduce=128, filters_3x3=256,
                     filters_5x5_reduce=24, filters_5x5=64, filters_pool_proj=64,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_4c')
x = inception_module(x, filters_1x1=112, filters_3x3_reduce=144, filters_3x3=288,
                     filters_5x5_reduce=32, filters_5x5=64, filters_pool_proj=64,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_4d')

# BUILD THE CLASSIFIERS — second auxiliary head, branched off after 4d
classifier_2 = keras.layers.AveragePooling2D((5, 5), strides=3)(x)
classifier_2 = Conv2D(128, (1, 1), padding='same', activation='relu')(classifier_2)
classifier_2 = Flatten()(classifier_2)
classifier_2 = Dense(1024, activation='relu')(classifier_2)
classifier_2 = Dropout(0.7)(classifier_2)
classifier_2 = Dense(1, activation='sigmoid', name='auxilliary_output_2')(classifier_2)

# BUILD THE INCEPTION MODULE AND MAX POOLING LAYERS (4e)
x = inception_module(x, filters_1x1=256, filters_3x3_reduce=160, filters_3x3=320,
                     filters_5x5_reduce=32, filters_5x5=128, filters_pool_proj=128,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_4e')
x = MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_4_3x3_2')(x)

# BUILD MODULES 5a and 5b
x = inception_module(x, filters_1x1=256, filters_3x3_reduce=160, filters_3x3=320,
                     filters_5x5_reduce=32, filters_5x5=128, filters_pool_proj=128,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_5a')
x = inception_module(x, filters_1x1=384, filters_3x3_reduce=192, filters_3x3=384,
                     filters_5x5_reduce=48, filters_5x5=128, filters_pool_proj=128,
                     kernel_init=kernel_init, bias_init=bias_init, name='inception_5b')

# BUILD THE CLASSIFIER (main head)
x = keras.layers.AveragePooling2D(pool_size=(2, 2), strides=1, padding='valid',
                                  name='avg_pool_5_3x3_1')(x)
x = Dropout(0.4)(x)
# NOTE(review): canonical GoogLeNet flattens *before* the final Dense layers;
# here Dense(1000) is applied to the 4-D tensor (acting on the channel axis)
# and flattened afterwards. Kept as written — changing the order would change
# the parameter count and learned behavior. Confirm this is intentional.
x = Dense(1000, activation='relu', name='linear')(x)
x = Flatten()(x)
x = Dense(1, activation='sigmoid', name='output')(x)

# Checkpointer — saves the best model seen so far by the monitored metric.
checkpointer = ModelCheckpoint(filepath=model_name, monitor=metric, verbose=1,
                               save_best_only=True)

# Early stopping on validation loss.
early_stopping = EarlyStopping(monitor='val_loss', mode='min', verbose=1,
                               patience=patience_on_early_stop)

model = Model(input_layer, [x, classifier_1, classifier_2],
              name='googlenet_complete_architecture')
model.summary()

model.compile(
    loss=['binary_crossentropy', 'binary_crossentropy', 'binary_crossentropy'],
    # Auxiliary heads contribute at reduced weight, per the GoogLeNet paper.
    loss_weights=[1, 0.3, 0.3],
    optimizer=optimizer,
    metrics=['accuracy', 'accuracy', 'accuracy'],
)

# batch_size is NOT passed here: the inputs are tf.data.Datasets that are
# already batched (at 32) by image_dataset_from_directory, and Keras rejects
# batch_size when the input is a dataset.
history = model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=epochs,
    callbacks=[tensorboard, lr_schedule, checkpointer, early_stopping],
)
# model.save(model_name)

# plot the learning curves
# pyplot.plot(history.history['output_accuracy'],label='train')
# pyplot.plot(history.history['val_auxilliary_output_2_accuracy'],label='validation')
# pyplot.legend()
# pyplot.show()