"""Train a LeNet-5-style CNN for binary classification of grayscale images.

Loads a two-class image folder, builds a small convolutional network,
trains with SGD + early stopping, checkpoints the best weights, and
plots the train/validation accuracy curves.
"""

import os
import sys

# Path setup MUST run before any `keras` import, or the installed copy wins
# over the local checkouts these appends are meant to prefer.
sys.path.append('c:/git/keras')
sys.path.append('c:/git/absl')

# Environment setup notes (installed via):
#   py -mpip install numpy tensorflow matplotlib
#   py -mpip show numpy / tensorflow
# site-packages: c:\users\skess\appdata\local\programs\python\python39\lib\site-packages

import tensorflow
import keras
from keras import optimizers, regularizers
from keras.callbacks import EarlyStopping, LearningRateScheduler, ModelCheckpoint
from keras.datasets import cifar10
from keras.layers import (Activation, AveragePooling2D, BatchNormalization,
                          Conv2D, Dense, Dropout, Flatten, MaxPooling2D)
from keras.layers.pooling import MaxPool2D
from keras.models import Sequential
from keras.optimizer_v2 import gradient_descent
from keras.optimizers import adam_v2
from keras.preprocessing.image import (ImageDataGenerator, array_to_img,
                                       img_to_array, load_img)
from keras.utils import np_utils
from matplotlib import pyplot
from tensorflow.keras.optimizers import Adam

# Alternative data source, kept for reference:
# (x_train, y_train), (x_test, y_test) = cifar10.load_data()

# ---------------------------------------------------------------------------
# Hyperparameters / configuration
# ---------------------------------------------------------------------------
base_filters = 6
image_size = (128, 128)           # every image is resized to this on load
batch_size = 64                   # previously declared 256 but both dataset
                                  # calls hard-coded 64; one constant now rules
learning_rate = .00001
epochs = 1000                     # effective length is capped by early stopping
patience_on_early_stop = 10
model_name = 'lenet5.h5'
data_dir = 'C:\\boneyard\\DeepLearning\\data'
validation_split = .20
seed = 1337                       # identical seed in both calls keeps the
                                  # train/validation split disjoint

# ---------------------------------------------------------------------------
# Data: two-class folder layout -> binary labels; grayscale to match the
# model's (128, 128, 1) input shape.
# ---------------------------------------------------------------------------
train_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    label_mode="binary",
    subset="training",
    validation_split=validation_split,
    image_size=image_size,
    color_mode='grayscale',
    batch_size=batch_size,
    seed=seed,
)
val_ds = tensorflow.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    label_mode="binary",
    subset="validation",
    validation_split=validation_split,
    image_size=image_size,
    color_mode='grayscale',
    batch_size=batch_size,
    seed=seed,
)

# ---------------------------------------------------------------------------
# Model: LeNet-5 variant (conv/pool stages C1-C3, then dense FC6/FC7)
# ---------------------------------------------------------------------------
model = Sequential()
# C1: 'same' padding preserves the 128x128 spatial size at the input stage
model.add(Conv2D(filters=base_filters, kernel_size=(3, 3), strides=1,
                 activation='relu', input_shape=(128, 128, 1), padding='same'))
model.add(AveragePooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'))
model.add(BatchNormalization())
# C2
model.add(Conv2D(filters=base_filters * 8, kernel_size=(3, 3), strides=1,
                 activation='relu', padding='valid'))
model.add(AveragePooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'))
model.add(Dropout(.20))
# C3
model.add(Conv2D(filters=base_filters * 16, kernel_size=(3, 3), strides=1,
                 activation='relu', padding='valid'))
model.add(BatchNormalization())
# C4: flatten feature maps for the dense head
model.add(Flatten())
# FC6
model.add(Dense(units=84, activation='relu'))
# FC7: single sigmoid unit -> binary probability
model.add(Dense(units=1, activation='sigmoid'))
model.summary()

# ---------------------------------------------------------------------------
# Training
# ---------------------------------------------------------------------------
optimizer = gradient_descent.SGD(learning_rate=learning_rate, momentum=0.9,
                                 nesterov=False)
# BUG FIX: this previously monitored training 'accuracy', so with
# save_best_only=True the saved checkpoint was the most-overfit model.
# Monitoring validation accuracy saves the model that generalizes best,
# consistent with EarlyStopping watching 'val_loss' below.
checkpointer = ModelCheckpoint(filepath=model_name, monitor='val_accuracy',
                               verbose=1, save_best_only=True)
early_stopping = EarlyStopping(monitor='val_loss', mode='min', verbose=1,
                               patience=patience_on_early_stop)
model.compile(loss='binary_crossentropy', optimizer=optimizer,
              metrics=['accuracy'])
history = model.fit(train_ds, epochs=epochs, validation_data=val_ds,
                    callbacks=[checkpointer, early_stopping])

# ---------------------------------------------------------------------------
# Plot the learning curves
# ---------------------------------------------------------------------------
pyplot.plot(history.history['accuracy'], label='train')
pyplot.plot(history.history['val_accuracy'], label='validation')
pyplot.legend()
pyplot.show()