Keras saves datasets downloaded from the web in a local cache directory (by default ~/.keras/datasets), so the download only happens on the first call.
from tensorflow import keras
from tensorflow.keras import layers
cifar = keras.datasets.cifar10  # CIFAR-10: 60,000 32x32 RGB images in 10 classes
(train_image, train_label), (test_image, test_label) = cifar.load_data()
The dataset is loaded directly from keras.datasets; load_data() returns the training and test splits as NumPy arrays.
train_image.shape, test_image.shape
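# Expected shapes: (50000, 32, 32, 3) for the training images and (10000, 32, 32, 3) for the test images;
# train_label and test_label have shapes (50000, 1) and (10000, 1) with class indices 0-9.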
Neural networks: it is best to normalize the inputs first (networks train more stably on uniformly scaled, small values).
train_image = train_image / 255  # scale pixel values from [0, 255] to [0, 1]
test_image = test_image / 255
model = keras.Sequential()
# Block 1: two 3x3 convolutions with 64 filters, then batch norm, 2x2 max pooling and dropout
model.add(layers.Conv2D(64, (3, 3), activation='relu', input_shape=(32, 32, 3)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.MaxPooling2D())
model.add(layers.Dropout(0.25))
# Block 2: same pattern with 128 filters
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.MaxPooling2D())
model.add(layers.Dropout(0.25))
# Block 3: 256 filters, with a 1x1 convolution instead of a second 3x3
model.add(layers.Conv2D(256, (3, 3), activation='relu'))
model.add(layers.Conv2D(256, (1, 1), activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.Dropout(0.25))
# Head: global average pooling instead of Flatten, then a dense layer and a 10-way softmax output
model.add(layers.GlobalAveragePooling2D())
model.add(layers.Dense(128))
model.add(layers.BatchNormalization())
model.add(layers.Dropout(0.5))
model.add(layers.Dense(10, activation='softmax'))
model.summary()
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['acc'])
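If the learning rate needs to be set explicitly, the optimizer can also be passed as an object instead of the 'adam' string; a minimal sketch (1e-3 is Adam's default rate and is shown only for illustration):
model.compile(optimizer=keras.optimizers.Adam(learning_rate=1e-3),
              loss='sparse_categorical_crossentropy',
              metrics=['acc'])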
history = model.fit(train_image, train_label, epochs=30, batch_size=128)
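The history object returned by fit records the per-epoch metrics and can be plotted; a minimal sketch assuming matplotlib is installed (the key 'acc' matches the metric name passed to compile):
import matplotlib.pyplot as plt
plt.plot(history.epoch, history.history['loss'], label='training loss')
plt.plot(history.epoch, history.history['acc'], label='training accuracy')
plt.legend()
plt.show()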
model.evaluate(test_image, test_label)
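After evaluation, class predictions for individual images can be read off the softmax outputs; a minimal sketch (np.argmax picks the most probable of the 10 classes for each image):
import numpy as np
pred = model.predict(test_image)      # shape (10000, 10): one probability per class
pred_label = np.argmax(pred, axis=1)  # predicted class index per image
print(pred_label[:10])
print(test_label[:10].flatten())      # true labels for comparison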