8. Code Implementation of the Inception V1 Network Structure


Data generator and sample display

# Read the data: rescale pixel values to [0, 1] and stream batches from the class-labelled folders
from keras.preprocessing.image import ImageDataGenerator

IMSIZE = 224
train_generator = ImageDataGenerator(rescale=1. / 255).flow_from_directory(
    '../../data/data_inception/train',
    target_size=(IMSIZE, IMSIZE),
    batch_size=100,
    class_mode='categorical')

validation_generator = ImageDataGenerator(rescale=1. / 255).flow_from_directory(
    '../../data/data_inception/test',
    target_size=(IMSIZE, IMSIZE),
    batch_size=100,
    class_mode='categorical')
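
As a quick sanity check (a minimal sketch; it assumes the train and test folders each contain one subfolder per class, matching the 17-way softmax used later), the generator's class mapping and image counts can be printed:

# Optional sanity check: class-name-to-label mapping and number of images found
print(train_generator.class_indices)
print(train_generator.samples, validation_generator.samples)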

# Display a batch of sample images
from matplotlib import pyplot as plt

fig, ax = plt.subplots(2, 5)          # 2 x 5 grid of panels
fig.set_figheight(7)
fig.set_figwidth(15)
ax = ax.flatten()
X, Y = next(train_generator)          # one batch: images X and one-hot labels Y
for i in range(10):
    ax[i].imshow(X[i, :, :, :])
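
Optionally (a small sketch building on the batch X, Y just drawn; it assumes the one-hot positions in Y follow train_generator.class_indices), each panel can be titled with its class name:

# Invert the class mapping and label each panel with the class of the displayed image
idx_to_class = {v: k for k, v in train_generator.class_indices.items()}
for i in range(10):
    ax[i].set_title(idx_to_class[Y[i].argmax()])
    ax[i].axis('off')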

Inception V1

# Compared with the earlier chapters, the concatenate function also needs to be imported here
from keras.layers import Conv2D, BatchNormalization, MaxPooling2D
from keras.layers import Flatten, Dropout, Dense, Input, concatenate
from keras import Model

input_layer = Input([IMSIZE, IMSIZE, 3])
x = input_layer

# Stem: 7x7/2 convolution, 3x3/2 max pooling, 3x3/1 convolution, 3x3/2 max pooling
x = Conv2D(64, (7, 7), strides=(2, 2), padding='same', activation='relu')(x)
x = BatchNormalization(axis=3)(x)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x)

x = Conv2D(192, (3, 3), strides=(1, 1), padding='same', activation='relu')(x)
x = BatchNormalization(axis=3)(x)  # BatchNormalization adds 4 * 192 = 768 parameters (gamma, beta, moving mean, moving variance)
x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x)
x  # in a notebook, the bare x on the last line displays the stem output tensor
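
In a plain script the intermediate shape can be checked explicitly instead (a one-line sketch, assuming the 224x224 input above):

print(x.shape)  # expected: (None, 28, 28, 192) after the stem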

# Nine Inception modules (all reusing the inception(3a) filter counts; a simplification of GoogLeNet),
# each followed by a stride-2 max pooling
for i in range(9):
    # branch 1: 1x1 convolution
    branch1x1 = Conv2D(64, (1, 1), strides=(1, 1), padding='same', activation='relu')(x)
    branch1x1 = BatchNormalization(axis=3)(branch1x1)

    # branch 2: 1x1 reduction followed by 3x3 convolution
    branch3x3 = Conv2D(96, (1, 1), strides=(1, 1), padding='same', activation='relu')(x)
    branch3x3 = BatchNormalization(axis=3)(branch3x3)
    branch3x3 = Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu')(branch3x3)
    branch3x3 = BatchNormalization(axis=3)(branch3x3)

    # branch 3: 1x1 reduction followed by 5x5 convolution
    branch5x5 = Conv2D(16, (1, 1), strides=(1, 1), padding='same', activation='relu')(x)
    branch5x5 = BatchNormalization(axis=3)(branch5x5)
    branch5x5 = Conv2D(32, (5, 5), strides=(1, 1), padding='same', activation='relu')(branch5x5)
    branch5x5 = BatchNormalization(axis=3)(branch5x5)

    # branch 4: 3x3 max pooling followed by 1x1 projection
    branchpool = MaxPooling2D(pool_size=(3, 3), strides=(1, 1), padding='same')(x)
    branchpool = Conv2D(32, (1, 1), strides=(1, 1), padding='same', activation='relu')(branchpool)
    branchpool = BatchNormalization(axis=3)(branchpool)

    # concatenate along the channel axis: 64 + 128 + 32 + 32 = 256 feature maps
    x = concatenate([branch1x1, branch3x3, branch5x5, branchpool], axis=3)
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x)

# After nine stride-2 poolings the 28x28 feature map has collapsed to 1x1x256
x = Dropout(0.4)(x)
x = Flatten()(x)
x = Dense(17, activation='softmax')(x)  # 17-way classification
output_layer = x
model = Model(input_layer, output_layer)
model.summary()
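
For readability, the loop body above can equivalently be wrapped in a helper function (a sketch only; the filter counts follow the simplified module used here, with every module reusing the inception(3a) sizes rather than the per-stage values of the original GoogLeNet paper):

from keras.layers import Conv2D, BatchNormalization, MaxPooling2D, concatenate

def inception_module(x, f1x1=64, f3x3_reduce=96, f3x3=128, f5x5_reduce=16, f5x5=32, fpool=32):
    # Conv2D followed by BatchNormalization, the pattern every branch above uses
    def conv_bn(t, filters, kernel):
        t = Conv2D(filters, kernel, strides=(1, 1), padding='same', activation='relu')(t)
        return BatchNormalization(axis=3)(t)

    branch1x1 = conv_bn(x, f1x1, (1, 1))
    branch3x3 = conv_bn(conv_bn(x, f3x3_reduce, (1, 1)), f3x3, (3, 3))
    branch5x5 = conv_bn(conv_bn(x, f5x5_reduce, (1, 1)), f5x5, (5, 5))
    branchpool = MaxPooling2D(pool_size=(3, 3), strides=(1, 1), padding='same')(x)
    branchpool = conv_bn(branchpool, fpool, (1, 1))
    # channel-wise concatenation: 64 + 128 + 32 + 32 = 256 feature maps
    return concatenate([branch1x1, branch3x3, branch5x5, branchpool], axis=3)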

Compiling and fitting the Inception V1 model

# Compile and train the model
from keras.optimizers import Adam

model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.001), metrics=['accuracy'])
model.fit_generator(train_generator, epochs=20, validation_data=validation_generator)
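
After training, the model can be evaluated on the validation generator and saved to disk (a brief sketch using the same TF 1.x-era Keras API as the code above; the filename is only an example):

# Evaluate on the validation set and save the trained model
loss, acc = model.evaluate_generator(validation_generator)
print('validation accuracy:', acc)
model.save('inception_v1_flowers.h5')  # example filename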

GitHub download link:

Tensorflow1.15 Deep Learning
