Updated all models October 2024

This commit is contained in:
2024-10-27 09:08:34 -04:00
parent f7011b5554
commit 08b0bb2d0c
7 changed files with 164 additions and 213 deletions

@@ -1,52 +1,28 @@
import sys
import os
from keras.layers.pooling import MaxPool2D
from keras.optimizer_v2 import gradient_descent
sys.path.append('c:/git/keras')
sys.path.append('c:/git/absl')
# installed
# py -mpip install numpy
# py -mpip show numpy
# py -mpip install tensorflow
# py -mpip show tensorflow
# py -mpip install matplotlib
# c:\users\skess\appdata\local\programs\python\python39\lib\site-packages
from keras.optimizers import SGD
from keras.optimizers import Adam
from tensorflow.keras.callbacks import TensorBoard
import keras
from keras.models import Model
#from keras.optimizers import *
#from keras.optimizers import adam_v2
from tensorflow.keras.optimizers import Adam
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.utils import np_utils
from keras.layers import Dense
from keras.layers import Activation
from keras.layers import Flatten
from keras.layers import Dropout
from keras.layers import BatchNormalization
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.src.legacy.preprocessing.image import ImageDataGenerator
from keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import EarlyStopping
from keras import regularizers
from keras import optimizers
from matplotlib import pyplot
from keras.layers import Flatten
from keras.layers import Dense
from keras.layers import MaxPool2D
from keras.layers import Dropout
from keras.models import Model
import numpy as np
import tensorflow
from inception_module import *
import math
from time import time
# INCEPTION ARCHITECTURE
image_size = (128, 128)
#model_weights_file='inception.h5'
model_name='inception.h5'
model_name='inception.h5.keras'
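# Keras 3's ModelCheckpoint only saves whole models to the native format, so
# the filepath has to end in '.keras'; hence the renamed checkpoint file.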
train_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
'C:\\boneyard\\DeepLearning\\data',
@@ -86,9 +62,11 @@ def decay(epoch,steps=100):
lr_schedule=keras.callbacks.LearningRateScheduler(decay,verbose=1)
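# For reference, a minimal step-decay body compatible with the scheduler above
# (a sketch: the real decay() is elided from this hunk, and these constants are assumptions):
# def decay(epoch, steps=100):
#     initial_lrate, drop, epochs_drop = 0.01, 0.96, 8
#     return initial_lrate * math.pow(drop, math.floor((1 + epoch) / epochs_drop))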
#sgd=SGD(lr=initial_lrate,momentum=0.9,nesterov=False)
optimizer=adam_v2.Adam(learning_rate=initial_lrate)
optimizer=gradient_descent.SGD(learning_rate=initial_lrate, momentum=0.9, nesterov=False)
# Create a TensorBoard instance with the path to the logs directory. Before training, in Terminal, run tensorboard --logdir=logs/
tensorboard = TensorBoard(log_dir='logs/{}'.format(time()))
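# Formatting log_dir with time() gives every run its own subdirectory under
# logs/, so successive runs show up as separate curves in TensorBoard.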
# OPTIMIZER
optimizer=SGD(learning_rate=initial_lrate, momentum=0.9, nesterov=False)
#INPUT LAYER
input_layer=keras.layers.Input(shape=input_shape)
@@ -96,15 +74,15 @@ input_layer=keras.layers.Input(shape=input_shape)
kernel_init=keras.initializers.glorot_uniform()
bias_init=keras.initializers.Constant(value=.2)
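# Glorot-uniform weights with a small constant bias; the 0.2 value follows
# common Keras GoogLeNet write-ups rather than anything tuned for this data.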
x=Conv2D(64, (7, 7), padding='same', strides=(2, 2), activation='relu', name='conv_1_7x7/2', kernel_initializer=kernel_init, bias_initializer=bias_init)(input_layer)
x=MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_1_3x3/2')(x)
x=Conv2D(192, (3, 3), padding='same', strides=(1, 1), activation='relu', name='conv_2b_3x3/1')(x)
x=MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_2_3x3/2')(x)
x=Conv2D(64, (7, 7), padding='same', strides=(2, 2), activation='relu', name='conv_1_7x7_2', kernel_initializer=kernel_init, bias_initializer=bias_init)(input_layer)
x=MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_1_3x3_2')(x)
x=Conv2D(192, (3, 3), padding='same', strides=(1, 1), activation='relu', name='conv_2b_3x3_1')(x)
x=MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_2_3x3_2')(x)
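# Sketch of what inception_module (star-imported above) presumably computes:
# the canonical GoogLeNet block of four parallel branches concatenated on the
# channel axis. This is inferred from the call signature, not the actual module.
def inception_module_sketch(x, filters_1x1, filters_3x3_reduce, filters_3x3,
                            filters_5x5_reduce, filters_5x5, filters_pool_proj,
                            kernel_init, bias_init, name=None):
    # branch 1: 1x1 convolution
    conv_1x1 = Conv2D(filters_1x1, (1, 1), padding='same', activation='relu',
                      kernel_initializer=kernel_init, bias_initializer=bias_init)(x)
    # branch 2: 1x1 reduction, then 3x3 convolution
    conv_3x3 = Conv2D(filters_3x3_reduce, (1, 1), padding='same', activation='relu',
                      kernel_initializer=kernel_init, bias_initializer=bias_init)(x)
    conv_3x3 = Conv2D(filters_3x3, (3, 3), padding='same', activation='relu',
                      kernel_initializer=kernel_init, bias_initializer=bias_init)(conv_3x3)
    # branch 3: 1x1 reduction, then 5x5 convolution
    conv_5x5 = Conv2D(filters_5x5_reduce, (1, 1), padding='same', activation='relu',
                      kernel_initializer=kernel_init, bias_initializer=bias_init)(x)
    conv_5x5 = Conv2D(filters_5x5, (5, 5), padding='same', activation='relu',
                      kernel_initializer=kernel_init, bias_initializer=bias_init)(conv_5x5)
    # branch 4: 3x3 max pool, then 1x1 projection
    pool_proj = MaxPool2D((3, 3), strides=(1, 1), padding='same')(x)
    pool_proj = Conv2D(filters_pool_proj, (1, 1), padding='same', activation='relu',
                       kernel_initializer=kernel_init, bias_initializer=bias_init)(pool_proj)
    return keras.layers.concatenate([conv_1x1, conv_3x3, conv_5x5, pool_proj], axis=3, name=name)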
#BUILD THE INCEPTION MODULES AND MAX POOLING LAYERS (3a and 3b)
x=inception_module(x,filters_1x1=64,filters_3x3_reduce=96,filters_3x3=128,filters_5x5_reduce=16,filters_5x5=32,filters_pool_proj=32,kernel_init=kernel_init,bias_init=bias_init,name='inception_3a')
x=inception_module(x,filters_1x1=128,filters_3x3_reduce=128,filters_3x3=192,filters_5x5_reduce=32,filters_5x5=96,filters_pool_proj=64,kernel_init=kernel_init,bias_init=bias_init,name='inception_3b')
x=MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_3_3x3/2')(x)
x=MaxPool2D((3, 3), padding='same', strides=(2, 2), name='max_pool_3_3x3_2')(x)
#BUILD THE INCEPTION MODULE AND MAX POOLING LAYERS (4a)
x=inception_module(x,filters_1x1=192,filters_3x3_reduce=96,filters_3x3=208,filters_5x5_reduce=16,filters_5x5=48,filters_pool_proj=64,kernel_init=kernel_init,bias_init=bias_init,name='inception_4a')
@@ -134,26 +112,19 @@ classifier_2 = Dense(1, activation='sigmoid', name='auxilliary_output_2')(classi
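# classifier_1/classifier_2 are built in elided code; a GoogLeNet-style
# auxiliary head (sketched here as an assumption) typically looks like:
#   aux = keras.layers.AveragePooling2D((5, 5), strides=3)(branch)
#   aux = Conv2D(128, (1, 1), padding='same', activation='relu')(aux)
#   aux = Flatten()(aux)
#   aux = Dense(1024, activation='relu')(aux)
#   aux = Dropout(0.7)(aux)
#   aux = Dense(1, activation='sigmoid', name='auxilliary_output_2')(aux)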
#BUILD THE INCEPTION MODULE AND MAX POOLING LAYERS (4e)
x=inception_module(x,filters_1x1=256,filters_3x3_reduce=160,filters_3x3=320,filters_5x5_reduce=32,filters_5x5=128,filters_pool_proj=128,kernel_init=kernel_init,bias_init=bias_init,name='inception_4e')
x=MaxPool2D((3,3),padding='same',strides=(2,2),name='max_pool_4_3x3/2')(x)
x=MaxPool2D((3,3),padding='same',strides=(2,2),name='max_pool_4_3x3_2')(x)
#BUILD MODULES 5a and 5b
x=inception_module(x,filters_1x1=256,filters_3x3_reduce=160,filters_3x3=320,filters_5x5_reduce=32,filters_5x5=128,filters_pool_proj=128,kernel_init=kernel_init,bias_init=bias_init,name='inception_5a')
x=inception_module(x,filters_1x1=384,filters_3x3_reduce=192,filters_3x3=384,filters_5x5_reduce=48,filters_5x5=128,filters_pool_proj=128,kernel_init=kernel_init,bias_init=bias_init,name='inception_5b')
#BUILD THE CLASSIFIER
x=keras.layers.AveragePooling2D(pool_size=(2,2), strides=1, padding='valid',name='avg_pool_5_3x3/1')(x)
x=keras.layers.AveragePooling2D(pool_size=(2,2), strides=1, padding='valid',name='avg_pool_5_3x3_1')(x)
x=Dropout(0.4)(x)
x = Dense(1000, activation='relu', name='linear')(x)
x=Flatten()(x)
x=Dense(1,activation='sigmoid',name='output')(x)
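# Note: the Dense(1000) above is applied to a 4D tensor, so it acts per spatial
# position along the channel axis; Flatten then collapses the result to a
# vector before the single sigmoid output unit.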
#model = Model(input_layer, [x], name='googlenet')
#model.summary()
#checkpointer=ModelCheckpoint(filepath=model_weights_file,verbose=1,save_best_only=True)
#model.compile(loss='binary_crossentropy',optimizer=optimizer,metrics=['accuracy'])
#history=model.fit(train_ds, epochs=epochs, validation_data=val_ds,batch_size=batch_size,verbose=2,callbacks=[lr_schedule])
#history=model.fit(train_ds, epochs=epochs, validation_data=val_ds,batch_size=batch_size,verbose=2,callbacks=[checkpointer])
# Checkpointer
checkpointer=ModelCheckpoint(filepath=model_name,monitor=metric,verbose=1,save_best_only=True)
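# 'metric' and 'patience' are defined in elided code; monitoring 'val_loss' or
# 'val_output_accuracy' would be typical choices here (an assumption, not the source).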
@@ -162,15 +133,15 @@ early_stopping=EarlyStopping(monitor='val_loss',mode='min',verbose=1,patience=pa
model = Model(input_layer, [x, classifier_1, classifier_2], name='googlenet_complete_architecture')
model.summary()
model.compile(loss=['binary_crossentropy', 'binary_crossentropy', 'binary_crossentropy'], loss_weights=[1, 0.3, 0.3], optimizer=optimizer, metrics=['accuracy'])
history = model.fit(train_ds, validation_data=val_ds, epochs=epochs, batch_size=batch_size, callbacks=[lr_schedule,checkpointer,early_stopping])
model.compile(loss=['binary_crossentropy', 'binary_crossentropy', 'binary_crossentropy'], loss_weights=[1, 0.3, 0.3], optimizer=optimizer, metrics=['accuracy','accuracy','accuracy'])
history = model.fit(train_ds, validation_data=val_ds, epochs=epochs, batch_size=batch_size, callbacks=[tensorboard,lr_schedule,checkpointer,early_stopping])
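# With three named outputs, history.history carries per-output keys such as
# 'output_accuracy' and 'val_auxilliary_output_2_accuracy' alongside the loss
# terms; these are the keys the learning-curve plots below index into.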
#model.save(model_name)
# plot the learning curves
pyplot.plot(history.history['output_accuracy'],label='train')
pyplot.plot(history.history['val_auxilliary_output_2_accuracy'],label='validation')
pyplot.legend()
pyplot.show()
# pyplot.plot(history.history['output_accuracy'],label='train')
# pyplot.plot(history.history['val_auxilliary_output_2_accuracy'],label='validation')
# pyplot.legend()
# pyplot.show()
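# An alternative learning-curve view (a sketch): total train/val loss.
# pyplot.plot(history.history['loss'], label='train loss')
# pyplot.plot(history.history['val_loss'], label='validation loss')
# pyplot.legend()
# pyplot.show()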