最近看了卷积神经网络的一些经典论文,用Tensorflow2进行了复现,其中包括LeNet,AlexNet,VGG16,ResNet50,并且在数据集上进行了测试,能够正常训练,结果表明搭建的没有问题。
下面进入正题:
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
加载mnist数据集用于测试LeNet
# Load the MNIST dataset (.npz archive) for testing LeNet.
# NpzFile keeps the archive open, so use it as a context manager to release it.
with np.load("E:/deep_learning/jupyternotebook工作空间/0531/mnist/mnist.npz") as mnist_data:
    # Scale pixel values to [0, 1]. Labels are integer class indices and must
    # NOT be scaled (sparse_categorical_crossentropy expects ints 0-9).
    x_train = mnist_data['x_train'] / 255.0
    y_train = mnist_data['y_train']        # bug fix: was wrongly divided by 255.0
    x_test = mnist_data['x_test'] / 255.0  # bug fix: test images must get the same normalization as training images
    y_test = mnist_data['y_test']
print(x_train.shape)
print(x_test.shape)
print(y_train.shape)
print(y_test.shape)
(60000, 28, 28)
(10000, 28, 28)
(60000,)
(10000,)
# Append a trailing channel axis so the (N, 28, 28) images become
# (N, 28, 28, 1), the NHWC layout Conv2D layers expect.
x_train0 = np.expand_dims(x_train, axis=-1)
x_test0 = np.expand_dims(x_test, axis=-1)
# Sanity-check one sample: show the first test digit with its label.
plt.imshow(x_test[0])
plt.title('The number is '+ str(y_test[0]))
Text(0.5, 1.0, 'The number is 7')
LeNet

LeNet模型网络结构示意图
def get_LeNet():
    """Build the classic LeNet CNN for 28x28x1 MNIST digits.

    Architecture: three sigmoid Conv2D stages (the first two followed by
    2x2 max pooling), flattened into two dense layers, ending in a
    10-way softmax. Returns an uncompiled keras.Sequential model.
    """
    return keras.Sequential(
        [
            layers.Conv2D(filters=6, kernel_size=(5, 5), strides=1,
                          activation='sigmoid', input_shape=(28, 28, 1),
                          name='Conv1'),
            layers.MaxPool2D(pool_size=(2, 2), strides=2, name='Pool1'),
            layers.Conv2D(16, (5, 5), strides=1, activation='sigmoid',
                          name='Conv2'),
            layers.MaxPool2D(pool_size=(2, 2), strides=2, name='Pool2'),
            layers.Conv2D(120, (4, 4), strides=1, activation='sigmoid',
                          name='Conv3'),
            layers.Flatten(name='Flatten'),
            layers.Dense(64, activation='sigmoid', name='FC1'),
            layers.Dense(10, activation='softmax', name='FC2'),
        ],
        name='LeNet',
    )
# Instantiate the model and print its layer-by-layer shape/parameter table.
LeNet = get_LeNet()
LeNet.summary()
Model: "LeNet"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
Conv1 (Conv2D) (None, 24, 24, 6) 156
_________________________________________________________________
Pool1 (MaxPooling2D) (None, 12, 12, 6) 0
_________________________________________________________________
Conv2 (Conv2D) (None, 8, 8, 16) 2416
_________________________________________________________________
Pool2 (MaxPooling2D) (None, 4, 4, 16) 0
_________________________________________________________________
Conv3 (Conv2D) (None, 1, 1, 120) 30840
_________________________________________________________________
Flatten (Flatten) (None, 120) 0
_________________________________________________________________
FC1 (Dense) (None, 64) 7744
_________________________________________________________________
FC2 (Dense) (None, 10) 650
=================================================================
Total params: 41,806
Trainable params: 41,806
Non-trainable params: 0
_________________________________________________________________
由上面的模型摘要(summary)可知,LeNet 为 5 层(按含可训练参数的层计:3 个卷积层 + 2 个全连接层)网络,共 41,806 个参数
# Train and evaluate LeNet on MNIST.
LeNet.compile(
    optimizer=keras.optimizers.Adam(learning_rate=0.001),
    loss='sparse_categorical_crossentropy',  # cross-entropy over integer class labels
    metrics=['accuracy'],
)
# Hold out 15% of the training set for validation; keep the History object.
history = LeNet.fit(x_train0, y_train, batch_size=64, epochs=1,
                    validation_split=0.15)
LeNet.evaluate(x_test0, y_test, batch_size=64)
AlexNet
def alexNet_model(numclasses):
alexNet = keras.Sequential(name='AlexNet')
#第一层
alexNet.add(layers.Conv2D(filters=96,kernel_size=(11,11),strides=(4,4),
activation='relu',input_shape=(227,227,3),name='Conv1'))
alexNet.add(layers.BatchNormalization(name='BN1'))
alexNet.add(layers.MaxPool2D(pool_size=(3,3),strides=(2,2),name='Pool1'))
#第二层
alexNet.add(layers.Conv2D(filters=256,kernel_size=(5,5),strides=(1,1),padding='same',
activation='relu',name='Conv2'))
alexNet.add(layers.BatchNormalization(name='BN2'))
alexNet.add(layers.MaxPool2D(pool_size=(3,3),strides=(2,2),name='Pool2'))
#第三层
alexNet.add(layers.Conv2D(filters=384,kernel_size=(3,3),strides=(1,1),padding='same',
activation='relu',name='Conv3'))
#第四层
alexNet.add(layers.Conv2D(filters=192,kernel_size=(3,3),strides=(1,1),padding='same',
activation='relu',name='Conv4'))
#第五层
alexNet.add(layers.Conv2D(filters=256,kernel_size=(3,3),strides=(1,1),padding=