I am trying to code ResNet-12 in Keras following this paper, but I get an error at layer 8, and in my code below the problem is in the function Layer_Type3.
I can't see where the problem is, can anybody help? Thanks in advance.
The error is:
ValueError: operands could not be broadcast together with shapes (128, 128, 16) (126, 126, 16)
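To show where the two shapes come from, here is a minimal sketch that rebuilds only the two branches of Layer_Type3 in isolation (assuming tensorflow.keras; the 256x256x16 input is what the block receives after the earlier layers, and the shape-preserving Layer_Type1 call and BatchNormalization are left out because they do not change the spatial size):

from tensorflow.keras.layers import Input, Conv2D, AveragePooling2D

x = Input((256, 256, 16))

# main branch: 3x3 conv with the default padding='valid', then 3x3 average pooling with stride 2
h = Conv2D(kernel_size=3, filters=16, strides=1)(x)    # -> (254, 254, 16)
h = AveragePooling2D(pool_size=(3, 3), strides=2)(h)   # -> (126, 126, 16)

# shortcut branch: 1x1 conv with stride 2
d = Conv2D(kernel_size=1, filters=16, strides=2)(x)    # -> (128, 128, 16)

print(h.shape)  # (None, 126, 126, 16)
print(d.shape)  # (None, 128, 128, 16)

The full code is below: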
# imports (tensorflow.keras; the standalone keras package works the same way)
from tensorflow.keras import regularizers
from tensorflow.keras.activations import relu, softmax
from tensorflow.keras.layers import (Activation, AveragePooling2D, BatchNormalization, Conv2D,
                                     Dense, Dropout, GlobalAveragePooling2D, Input, add)
from tensorflow.keras.models import Model

def Layer_Type1(n_output):
    # n_output: number of feature maps in the block
    # keras functional api: returns a function of type Tensor -> Tensor
    def f(x):
        # 3x3 convolution, stride 1, 'same' padding keeps the spatial size
        h = Conv2D(kernel_size=3, filters=n_output, strides=1, padding='same',
                   kernel_regularizer=regularizers.l2(0.01))(x)
        # batch normalization followed by ReLU activation
        h = BatchNormalization()(h)
        h = Activation(relu)(h)
        return h
    return f
def Layer_Type2(n_output):
    # n_output: number of feature maps in the block
    # keras functional api: returns a function of type Tensor -> Tensor
    def f(x):
        # first convolution (conv + BN + ReLU from Layer_Type1)
        h = Layer_Type1(n_output)(x)
        # second convolution, 'same' padding, followed by batch normalization
        h = Conv2D(kernel_size=3, filters=n_output, strides=1, padding='same',
                   kernel_regularizer=regularizers.l2(0.01))(h)
        h = BatchNormalization()(h)
        # residual connection: y = x + F(x)
        return add([x, h])
    return f
def Layer_Type3(n_output):
    def f(x):
        # first convolution (conv + BN + ReLU from Layer_Type1)
        h = Layer_Type1(n_output)(x)
        # second convolution (default padding='valid'), followed by batch normalization
        h = Conv2D(kernel_size=3, filters=n_output, strides=1,
                   kernel_regularizer=regularizers.l2(0.01))(h)
        h = BatchNormalization()(h)
        # downsample the main branch
        h = AveragePooling2D(pool_size=(3, 3), strides=2)(h)
        # shortcut: 1x1 conv with stride 2 to match the downsampling
        d = Conv2D(kernel_size=1, filters=n_output, strides=2)(x)
        d = BatchNormalization()(d)
        return add([d, h])
    return f
def Layer_Type4(n_output):
    def f(x):
        # first convolution (conv + BN + ReLU from Layer_Type1)
        h = Layer_Type1(n_output)(x)
        # second convolution (default padding='valid'), followed by batch normalization
        h = Conv2D(kernel_size=3, filters=n_output, strides=1,
                   kernel_regularizer=regularizers.l2(0.01))(h)
        h = BatchNormalization()(h)
        # reduce each feature map to a single value
        h = GlobalAveragePooling2D()(h)
        return h
    return f
input_tensor = Input((256, 256, 1))
# 2 layers of type 1:
x = Layer_Type1(64)(input_tensor)
x = Layer_Type1(16)(x)
# 5 layers of type 2:
for i in range(5):
    x = Layer_Type2(16)(x)
# 4 layers of type 3:
x = Layer_Type3(16)(x)    # 1  ########## error here
x = Layer_Type3(64)(x)    # 2
x = Layer_Type3(128)(x)   # 3
x = Layer_Type3(256)(x)   # 4
# 1 layer of type 4:
x = Layer_Type4(512)(x)
x = Dropout(0.2)(x)
# last softmax layer
x = Dense(units=2, kernel_regularizer=regularizers.l2(0.01))(x)
x = Activation(softmax)(x)
model = Model(inputs=input_tensor, outputs=x)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
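From the shapes, my guess is that the second Conv2D and the AveragePooling2D in Layer_Type3 use the default padding='valid' (256 -> 254 -> 126 on the main branch), while the 1x1 stride-2 shortcut on the original 256x256 input gives 128x128. The sketch below is the variant I would try, with padding='same' on both layers so the two branches both come out at 128x128x16, but I am not sure this is what the paper intends:

def Layer_Type3(n_output):
    def f(x):
        h = Layer_Type1(n_output)(x)
        # 'same' padding keeps the spatial size before pooling
        h = Conv2D(kernel_size=3, filters=n_output, strides=1, padding='same',
                   kernel_regularizer=regularizers.l2(0.01))(h)
        h = BatchNormalization()(h)
        # 'same' padding gives ceil(size / 2), e.g. 256 -> 128, matching the shortcut
        h = AveragePooling2D(pool_size=(3, 3), strides=2, padding='same')(h)
        # shortcut: 1x1 conv with stride 2 also gives 128x128
        d = Conv2D(kernel_size=1, filters=n_output, strides=2)(x)
        d = BatchNormalization()(d)
        return add([d, h])
    return f

With this change the later Layer_Type3 blocks (64, 128, 256 filters) also seem to line up, since both branches then halve the spatial size in the same way. Is this the right reading of the paper, or am I missing something else?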