from itertools import filterfalse
import sys
import os

import keras
from keras.layers import Activation
from keras.layers import BatchNormalization
from keras.layers import Conv2D
from keras import regularizers
from keras import optimizers
from matplotlib import pyplot
import numpy as np
import tensorflow


def bottleneck_residual_block(x, kernel_size, filters, reduce=False, s=2):
    # Bottleneck residual block: 1x1 -> kxk -> 1x1 convolutions plus a shortcut.
    # When reduce=True, the block downsamples by stride s and projects the
    # shortcut with a 1x1 convolution so the shapes match at the final addition.
    F1, F2, F3 = filters
    x_shortcut = x

    if reduce:
        # Project and downsample the shortcut to match the main path.
        x_shortcut = Conv2D(filters=F3, kernel_size=(1, 1), strides=(s, s))(x_shortcut)
        x_shortcut = BatchNormalization(axis=3)(x_shortcut)

        # First 1x1 convolution, stride s to downsample the main path.
        x = Conv2D(filters=F1, kernel_size=(1, 1), strides=(s, s), padding='valid')(x)
        x = BatchNormalization(axis=3)(x)
        x = Activation('relu')(x)
    else:
        # First 1x1 convolution, spatial size unchanged.
        x = Conv2D(filters=F1, kernel_size=(1, 1), strides=(1, 1), padding='valid', kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x)
        x = BatchNormalization(axis=3)(x)
        x = Activation('relu')(x)

    # Second component: kernel_size x kernel_size convolution with 'same' padding.
    x = Conv2D(filters=F2, kernel_size=kernel_size, strides=(1, 1), padding='same', kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x)
    x = BatchNormalization(axis=3)(x)
    x = Activation('relu')(x)

    # Third component: 1x1 convolution expanding back to F3 channels (no ReLU yet).
    x = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1), padding='valid', kernel_initializer=keras.initializers.glorot_uniform(seed=0))(x)
    x = BatchNormalization(axis=3)(x)

    # Add the shortcut to the main path, then apply the final ReLU.
    x = keras.layers.Add()([x, x_shortcut])
    x = Activation('relu')(x)

    return x
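

# A minimal usage sketch: stacking bottleneck_residual_block calls into one
# ResNet-style stage. The input shape and filter sizes below are illustrative
# assumptions, not values taken from the rest of this file.
from keras.layers import Input
from keras.models import Model

inputs = Input(shape=(56, 56, 64))  # hypothetical feature map
stage = bottleneck_residual_block(inputs, 3, [64, 64, 256], reduce=True, s=1)  # projection block
stage = bottleneck_residual_block(stage, 3, [64, 64, 256])  # identity block
stage = bottleneck_residual_block(stage, 3, [64, 64, 256])  # identity block
model = Model(inputs=inputs, outputs=stage)
model.summary()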