# import the necessary packages
from keras.initializers import glorot_uniform
from keras.layers import (Activation, Add, AveragePooling2D,
                          BatchNormalization, Conv2D, Dense, Dropout,
                          Flatten, Input, MaxPooling2D)
from keras.models import Model


class SmallerVGGNet:
    @staticmethod
    def build(width, height, depth, classes, finalact):

        X1 = Input(shape=(height, width, depth))

        # CONV => RELU => POOL
        X = Conv2D(16, (3, 3), padding="same", strides=(1, 1), name="con_layer1")(X1)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)
        X = MaxPooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        X = Conv2D(32, (3, 3), padding="same", strides=(2, 2), name="con_layer2")(X)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = Conv2D(32, (3, 3), padding="same", strides=(1, 1), name="con_layer3")(X)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = MaxPooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        # First residual component (X0 branch)
        X0 = Conv2D(256, (5, 5), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
        X0 = BatchNormalization(axis=3)(X0)
        X0 = Activation("relu")(X0)

        # (CONV => RELU) * 2 => POOL
        X = Conv2D(64, (3, 3), padding="same", strides=(2, 2), name="con_layer4")(X0)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = Conv2D(64, (3, 3), padding="same", strides=(1, 1), name="con_layer5")(X)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = AveragePooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        # Second residual component (X0 branch)
        X0 = Conv2D(512, (5, 5), strides=(1, 1), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
        X0 = BatchNormalization(axis=3)(X0)
        X0 = Activation("relu")(X0)

        # (CONV => RELU) * 2 => POOL
        X = Conv2D(128, (3, 3), padding="same", strides=(2, 2), name="con_layer6")(X0)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = Conv2D(128, (3, 3), padding="same", strides=(1, 1), name="con_layer7")(X)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = MaxPooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        # Third residual component (X0 branch)
        X0 = Conv2D(1024, (7, 7), strides=(2, 2), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
        X0 = BatchNormalization(axis=3)(X0)
        # Dense already applies the ReLU, so no extra Activation layer is needed
        X0 = Dense(128, activation="relu")(X0)

        # flatten the final feature maps (X0), not the raw input X1
        X = Flatten()(X0)
        X = BatchNormalization()(X)
        X = Dropout(0.5)(X)
        output = Dense(classes, activation=finalact)(X)

        model = Model(inputs=[X1], outputs=output)

        # summary() prints itself and returns None, so no print() is needed
        model.summary()
        return model

In a residual network, the normal layers should be connected to the residual/convolutional blocks. In my code, "X" is the normal layer and "X0" is the residual block. At the end, I want to add these layers together. How can I add these two layers together, including a ReLU activation function?


1 Answer


X0 is your residual block and X is your normal layer. First import from keras import layers, then combine them with layers.add([X, X0]). To include the ReLU, apply an Activation("relu") layer to the result of the addition.
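For reference, here is a minimal self-contained sketch of that pattern (the 32x32x3 input and the 16-filter convolutions are illustrative placeholders, not taken from your model; the key constraint is that both tensors passed to layers.add must have identical shapes):

from keras import layers
from keras.layers import Activation, Conv2D, Input
from keras.models import Model

inputs = Input(shape=(32, 32, 3))
X = Conv2D(16, (3, 3), padding="same")(inputs)   # "normal" path
X0 = Conv2D(16, (3, 3), padding="same")(inputs)  # "residual" path

# element-wise sum of the two branches, then ReLU (the standard ResNet pattern)
X = layers.add([X, X0])
X = Activation("relu")(X)

model = Model(inputs=inputs, outputs=X)
model.summary()

Note that in your build method X and X0 generally have different spatial sizes and channel counts at the points where you would want to merge them, so you would first need a projection shortcut (for example, a 1x1 convolution with matching strides and filter count) to make the shapes agree before calling layers.add.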

answered 2020-01-30T04:42:29.367