# LeNet-style binary image classifier (Keras / TensorFlow) trained on a
# local image directory with TensorBoard logging, checkpointing, and
# early stopping.
import sys
|
|
import os
|
|
|
|
from keras.optimizers import SGD
|
|
from keras.optimizers import Adam
|
|
from tensorflow.keras.callbacks import TensorBoard
|
|
import keras
|
|
from keras.src.legacy.preprocessing.image import ImageDataGenerator
|
|
from keras.callbacks import ModelCheckpoint
|
|
from tensorflow.keras.callbacks import EarlyStopping
|
|
from keras.layers import Flatten
|
|
from keras.layers import Dense
|
|
from keras.layers import MaxPool2D
|
|
from keras.layers import Dropout
|
|
from keras.models import Model
|
|
from keras.layers import AveragePooling2D
|
|
from keras.models import Sequential
|
|
import numpy as np
|
|
import tensorflow
|
|
from inception_module import *
|
|
import math
|
|
from time import time
|
|
|
|
# Load some data
# (x_train, y_train), (x_test, y_test) = cifar10.load_data()

# ---- Run configuration ----
base_filters = 6                # width of the first conv layer; deeper layers scale from it
image_size = (128, 128)         # every input image is resized to this (height, width)
batch_size = 256                # NOTE(review): defined but unused — the dataset loaders below hard-code 64; confirm which is intended
learning_rate = 1e-5
epochs = 1000
patience_on_early_stop = 10     # epochs without val_loss improvement before training stops
model_name = 'lenet5.h5.keras'  # checkpoint file written by ModelCheckpoint
|
|
|
|
# Training split: 80% of the images found under the data directory, loaded
# as 128x128 grayscale with binary (0/1) labels inferred from subfolders.
train_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    'C:\\boneyard\\DeepLearning\\data',
    validation_split=.20,
    subset="training",
    seed=1337,              # same seed as the validation loader so the 80/20 split is consistent
    label_mode="binary",
    color_mode='grayscale',
    image_size=image_size,
    batch_size=64,          # NOTE(review): ignores the module-level batch_size constant (256)
)
|
|
# Validation split: the remaining 20% of the same directory, loaded with
# identical options and the same seed so it never overlaps the training set.
val_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    'C:\\boneyard\\DeepLearning\\data',
    validation_split=.20,
    subset="validation",
    seed=1337,              # must match the training loader's seed
    label_mode="binary",
    color_mode='grayscale',
    image_size=image_size,
    batch_size=64,          # NOTE(review): ignores the module-level batch_size constant (256)
)
|
|
|
|
# TensorBoard logging. Before training, run in a terminal:
#   tensorboard --logdir=logs/
run_log_dir = f'logs/{time()}'  # unique per-run directory keyed on the epoch timestamp
tensorboard = TensorBoard(log_dir=run_log_dir)
|
|
|
|
# ---- Model: small LeNet-style CNN for 128x128x1 grayscale inputs,
# ending in a single sigmoid unit for binary classification. ----
model = Sequential([
    # C1: 'same' padding keeps the 128x128 spatial size; average pooling halves it
    Conv2D(filters=base_filters, kernel_size=(3, 3), strides=1,
           activation='relu', input_shape=(128, 128, 1), padding='same'),
    AveragePooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'),
    BatchNormalization(),

    # C2: 8x wider, unpadded conv followed by pooling and dropout
    Conv2D(filters=base_filters * 8, kernel_size=(3, 3), strides=1,
           activation='relu', padding='valid'),
    AveragePooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'),
    Dropout(.20),

    # C3: 16x wider conv with batch norm
    Conv2D(filters=base_filters * 16, kernel_size=(3, 3), strides=1,
           activation='relu', padding='valid'),
    BatchNormalization(),

    # C4: collapse the feature maps into a single vector
    Flatten(),

    # FC6: hidden dense layer (84 units, as in classic LeNet-5)
    Dense(units=84, activation='relu'),

    # FC7: single sigmoid output — probability of the positive class
    Dense(units=1, activation='sigmoid'),
])

model.summary()
|
|
|
|
# ---- Optimizer, callbacks, and training loop ----
optimizer = SGD(learning_rate=learning_rate, momentum=0.9, nesterov=False)

# Save the best weights seen so far to model_name.
checkpointer = ModelCheckpoint(
    filepath=model_name,
    monitor='accuracy',   # NOTE(review): checkpoints on *training* accuracy while early stopping watches val_loss — confirm the asymmetry is intended
    verbose=1,
    save_best_only=True,
)

# Stop once val_loss has not improved for patience_on_early_stop epochs.
early_stopping = EarlyStopping(
    monitor='val_loss',
    mode='min',
    verbose=1,
    patience=patience_on_early_stop,
)

model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])

callbacks = [tensorboard, checkpointer, early_stopping]
history = model.fit(train_ds, epochs=epochs, validation_data=val_ds, callbacks=callbacks)
|
|
|
|
# Plot the learning curves (requires `from matplotlib import pyplot`):
# pyplot.plot(history.history['accuracy'], label='train')
# pyplot.plot(history.history['val_accuracy'], label='validation')
# pyplot.legend()
# pyplot.show()