# Setup notes (Windows):
#   py -mpip install numpy tensorflow matplotlib
#   py -mpip show tensorflow

import keras
from keras.layers import Activation
from keras.layers import BatchNormalization
from keras.layers import Conv2D
from keras import regularizers
from keras import optimizers
from matplotlib import pyplot
import numpy as np
import tensorflow


def bottleneck_residual_block(x, kernel_size, filters, reduce=False, s=2):
    """ResNet bottleneck block: 1x1 -> kxk -> 1x1 convolutions plus a skip connection.

    filters gives the (F1, F2, F3) channel widths of the three convolutions.
    When reduce=True the block downsamples by stride s and projects the
    shortcut with a 1x1 convolution so it matches the main path's shape.
    """
    F1, F2, F3 = filters
    x_shortcut = x

    if reduce:
        # Projection shortcut: match the main path's spatial size and channel count.
        x_shortcut = Conv2D(filters=F3, kernel_size=(1, 1), strides=(s, s),
                            kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x_shortcut)
        x_shortcut = BatchNormalization(axis=3)(x_shortcut)

        # The first 1x1 convolution also applies the stride when downsampling.
        x = Conv2D(filters=F1, kernel_size=(1, 1), strides=(s, s), padding='valid',
                   kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x)
        x = BatchNormalization(axis=3)(x)
        x = Activation('relu')(x)
    else:
        # Identity shortcut: the first 1x1 convolution keeps the spatial size.
        x = Conv2D(filters=F1, kernel_size=(1, 1), strides=(1, 1), padding='valid',
                   kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x)
        x = BatchNormalization(axis=3)(x)
        x = Activation('relu')(x)

    # Middle kxk convolution; 'same' padding preserves the spatial size.
    x = Conv2D(filters=F2, kernel_size=kernel_size, strides=(1, 1), padding='same',
               kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x)
    x = BatchNormalization(axis=3)(x)
    x = Activation('relu')(x)

    # Final 1x1 convolution widens the output to F3 channels before the addition.
    x = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1), padding='valid',
               kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x)
    x = BatchNormalization(axis=3)(x)

    # Add the shortcut, then apply the final non-linearity.
    x = keras.layers.Add()([x, x_shortcut])
    x = Activation('relu')(x)
    return x
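

# A minimal usage sketch, assuming TensorFlow/Keras is installed. The input
# shape and the (64, 64, 256) filter widths below are illustrative assumptions,
# not values taken from the original script.
if __name__ == '__main__':
    inputs = keras.Input(shape=(56, 56, 64))
    # Start with a reducing block so the shortcut is projected to F3 channels.
    y = bottleneck_residual_block(inputs, kernel_size=(3, 3),
                                  filters=(64, 64, 256), reduce=True, s=1)
    # Identity blocks can then be stacked; their input and output shapes match.
    y = bottleneck_residual_block(y, kernel_size=(3, 3), filters=(64, 64, 256))
    model = keras.Model(inputs=inputs, outputs=y)
    model.summary()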