TensorFlow 神经网络中间层的可视化

目录：
1. 训练网络并保存为 .h5 文件
2. 通过 .h5 文件导入网络
3. 可视化网络中间层结果
（1）索引取层可视化
（2）通过名字取层可视化
1. 训练网络并保存为.h5文件
我们使用AlexNet为例任务是手写数字识别训练集使用手写数字集mnist。
网络的结构我们使用的是28x28的黑白图
网络搭建和训练的代码
# 最终版
import os.path
import tensorflow as tf
import matplotlib.pyplot as plt
import cv2# 画出训练过程的准确率和损失值的图像
def plotTrainHistory(history, train, val):plt.plot(history[train])plt.plot(history[val])plt.title(Train History)plt.xlabel(Epoch)plt.ylabel(train)plt.legend([train, validation], loc upper left)plt.show()(xTrain, yTrain), (xTest, yTest) tf.keras.datasets.mnist.load_data()xTrain tf.expand_dims(xTrain, axis 3)
xTest tf.expand_dims(xTest, axis 3)
print(f训练集数据大小{xTrain.shape})
print(f训练集标签大小{yTrain.shape})
print(f测试集数据大小{xTest.shape})
print(f测试集标签大小{yTest.shape})# 归一化
xTrainNormalize tf.cast(xTrain, tf.float32) / 255
xTestNormalize tf.cast(xTest, tf.float32) / 255
# 数据独热编码
yTrainOneHot tf.keras.utils.to_categorical(yTrain)
yTestOneHot tf.keras.utils.to_categorical(yTest)model tf.keras.models.Sequential([tf.keras.layers.Conv2D(filters 96, kernel_size 11, strides 4, input_shape (28, 28, 1),padding SAME, activation tf.keras.activations.relu),tf.keras.layers.BatchNormalization(),tf.keras.layers.MaxPool2D(pool_size 3, strides 2, padding SAME),tf.keras.layers.Conv2D(filters 256, kernel_size 5, strides 1,padding SAME, activation tf.keras.activations.relu),tf.keras.layers.BatchNormalization(),tf.keras.layers.MaxPool2D(pool_size 3, strides 2, padding SAME),tf.keras.layers.Conv2D(filters 384, kernel_size 3, strides 1,padding SAME, activation tf.keras.activations.relu),tf.keras.layers.Conv2D(filters 384, kernel_size 3, strides 1,padding SAME, activation tf.keras.activations.relu),tf.keras.layers.Conv2D(filters 256, kernel_size 3, strides 1,padding SAME, activation tf.keras.activations.relu),tf.keras.layers.MaxPool2D(pool_size 3, strides 2, padding SAME),tf.keras.layers.Flatten(),tf.keras.layers.Dense(4096, activation tf.keras.activations.relu),tf.keras.layers.Dropout(0.5),tf.keras.layers.Dense(4096, activation tf.keras.activations.relu),tf.keras.layers.Dropout(0.5),tf.keras.layers.Dense(10, activation tf.keras.activations.softmax)
])weightsPath ./AlexNetModel/callback tf.keras.callbacks.ModelCheckpoint(filepath weightsPath,save_best_only True,save_weights_only True,verbose 1
)model.compile(loss tf.keras.losses.CategoricalCrossentropy(),optimizer tf.keras.optimizers.Adam(),metrics [accuracy]
)model.summary()# 不存在就训练模型
print(参数文件不存在即将训练模型)
modelTrain model.fit(xTrainNormalize, yTrainOneHot, validation_split 0.2,epochs 20, batch_size 300, verbose 1, callbacks [callback]
)
model.save(./model.h5)
plotTrainHistory(modelTrain.history, loss, val_loss)
plotTrainHistory(modelTrain.history, 'accuracy', 'val_accuracy')

2. 通过 .h5 文件导入网络
把刚才训练得到的模型重新读取并且重新加载数据集
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as npdef plot_images(images, number, path, title, gray False):plt.figure()plt.title(title)order 1for i in range(0, number):plt.subplot(3, 3, order)if gray:plt.imshow(images[:, :, 0, i], cmap gray)else:plt.imshow(images[:, :, 0, i])plt.colorbar()order order 1plt.savefig(./{}.png.format(path))plt.show()if __name__ __main__:weightsPath ./AlexNetModel/(xTrain, yTrain), (xTest, yTest) tf.keras.datasets.mnist.load_data()xTrain tf.expand_dims(xTrain, axis 3)xTest tf.expand_dims(xTest, axis 3)# print(f训练集数据大小{xTrain.shape})# print(f训练集标签大小{yTrain.shape})# print(f测试集数据大小{xTest.shape})# print(f测试集标签大小{yTest.shape})# 归一化xTrainNormalize tf.cast(xTrain, tf.float32) / 255xTestNormalize tf.cast(xTest, tf.float32) / 255# 数据独热编码yTrainOneHot tf.keras.utils.to_categorical(yTrain)yTestOneHot tf.keras.utils.to_categorical(yTest)model tf.keras.models.load_model(model.h5)model.summary()print(Layer Number, len(model.layers))sample xTrainNormalize[0]plt.imshow(sample)plt.colorbar()plt.savefig(./train.png)
3. 可视化网络中间层结果
测试的数字 5。

（1）索引取层可视化

model.layers 中存放着这个神经网络的全部层，它是一个 list 类型变量。AlexNet 一共 16 层（卷积层、全连接层、池化层等都算入），全部存储在里面。
model tf.keras.models.load_model(model.h5)
print(Layer Number, len(model.layers))可视化的时候我们取出一部分层然后来预测预测结果就是取出来这部分层的结果因此就看到了中间层的结果
output tf.keras.models.Sequential([tf.keras.layers.InputLayer(input_shape (28, 28, 1)),model.layers[0],model.layers[1],model.layers[2],
]).predict(sample)
print(output.shape, output.shape)
plot_images(output, 9, 5_Conv2D_BN_MP_1, str(output.shape))查看三层的结果即Conv2DBNMaxPool结果是 (28, 4, 1, 96)这里画出前9个 把这96个叠加在一起的结果
t output[:, :, 0, 0]
for i in range(1, output.shape[3]):t t output[:, :, 0, i]
plt.imshow(t)
plt.colorbar()
plt.savefig(./5_Conv2D_BN_MP_1_All.png)下面的代码是画出神经网络三个中间层的结果 import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as npdef plot_images(images, number, path, title, gray False):plt.figure()plt.title(title)order 1for i in range(0, number):plt.subplot(3, 3, order)if gray:plt.imshow(images[:, :, 0, i], cmap gray)else:plt.imshow(images[:, :, 0, i])plt.colorbar()order order 1plt.savefig(./{}.png.format(path))plt.show()if __name__ __main__:weightsPath ./AlexNetModel/(xTrain, yTrain), (xTest, yTest) tf.keras.datasets.mnist.load_data()xTrain tf.expand_dims(xTrain, axis 3)xTest tf.expand_dims(xTest, axis 3)# print(f训练集数据大小{xTrain.shape})# print(f训练集标签大小{yTrain.shape})# print(f测试集数据大小{xTest.shape})# print(f测试集标签大小{yTest.shape})# 归一化xTrainNormalize tf.cast(xTrain, tf.float32) / 255xTestNormalize tf.cast(xTest, tf.float32) / 255# 数据独热编码yTrainOneHot tf.keras.utils.to_categorical(yTrain)yTestOneHot tf.keras.utils.to_categorical(yTest)model tf.keras.models.load_model(model.h5)model.summary()print(Layer Number, len(model.layers))sample xTrainNormalize[0]plt.imshow(sample)plt.colorbar()plt.savefig(./train.png)output tf.keras.models.Sequential([tf.keras.layers.InputLayer(input_shape (28, 28, 1)),model.layers[0],model.layers[1],model.layers[2],]).predict(sample)print(output.shape, output.shape)plot_images(output, 9, 5_Conv2D_BN_MP_1, str(output.shape))t output[:, :, 0, 0]for i in range(1, output.shape[3]):t t output[:, :, 0, i]plt.imshow(t)plt.colorbar()plt.savefig(./5_Conv2D_BN_MP_1_All.png)output tf.keras.models.Sequential([tf.keras.layers.InputLayer(input_shape(28, 28, 1)),model.layers[0],model.layers[1],model.layers[2],model.layers[3],model.layers[4],model.layers[5],]).predict(sample)print(output.shape, output.shape)plot_images(output, 9, 5_Conv2D_BN_MP_2, str(output.shape))t output[:, :, 0, 0]for i in range(1, output.shape[3]):t t output[:, :, 0, i]plt.imshow(t)plt.colorbar()plt.savefig(./5_Conv2D_BN_MP_2_All.png)output tf.keras.models.Sequential([tf.keras.layers.InputLayer(input_shape(28, 28, 
1)),model.layers[0],model.layers[1],model.layers[2],model.layers[3],model.layers[4],model.layers[5],model.layers[6],model.layers[7],model.layers[8],model.layers[9],]).predict(sample)print(output.shape, output.shape)plot_images(output, 9, 5_Conv2D_3_MP, str(output.shape))t output[:, :, 0, 0]for i in range(1, output.shape[3]):t t output[:, :, 0, i]plt.imshow(t)plt.colorbar()plt.savefig(./5_Conv2D_3_MP_All.png)
0 和 5 的结果。

（2）通过名字取层可视化
模型的**summary()**成员函数可以查看网络每一层名字和参数情况
model.summary()

博客中使用的 AlexNet 每一层名字和参数情况如下。通过名字来取中间层并且预测，得到中间层可视化结果。如果我们要看这个池化层的结果，这样写代码：
model tf.keras.models.load_model(../model.h5)
model.summary()sample xTrainNormalize[0]
plt.imshow(sample)
plt.colorbar()
plt.savefig(./train.png)output tf.keras.models.Model(inputsmodel.get_layer(conv2d).input,outputsmodel.get_layer(max_pooling2d).output
).predict(sample)通过get_layer获取指定名字的层
inputs 指定输入层，outputs 指定输出层。
每一层的名字可以在创建的时候使用name参数指定
...
tf.keras.layers.Conv2D(filters 96, kernel_size 11, strides 4, input_shape (28, 28, 1),padding SAME, activation tf.keras.activations.relu, name Conv2D_1),
...

每一层的名字（红色框框出）。下面是例子：
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as npdef plot_images(images, number, path, title, gray False):plt.figure()plt.title(title)order 1for i in range(0, number):plt.subplot(3, 3, order)if gray:plt.imshow(images[:, :, 0, i], cmap gray)else:plt.imshow(images[:, :, 0, i])plt.colorbar()order order 1plt.savefig(./{}.png.format(path))plt.show()if __name__ __main__:(xTrain, yTrain), (xTest, yTest) tf.keras.datasets.mnist.load_data()xTrain tf.expand_dims(xTrain, axis 3)xTest tf.expand_dims(xTest, axis 3)# 归一化xTrainNormalize tf.cast(xTrain, tf.float32) / 255xTestNormalize tf.cast(xTest, tf.float32) / 255# 数据独热编码yTrainOneHot tf.keras.utils.to_categorical(yTrain)yTestOneHot tf.keras.utils.to_categorical(yTest)model tf.keras.models.load_model(../model.h5)model.summary()sample xTrainNormalize[0]plt.imshow(sample)plt.colorbar()plt.savefig(./train.png)output tf.keras.models.Model(inputsmodel.get_layer(conv2d).input,outputsmodel.get_layer(max_pooling2d).output).predict(sample)# output tf.keras.models.Sequential([# tf.keras.layers.InputLayer(input_shape (28, 28, 1)),# model.layers[0],# model.layers[1],# model.layers[2],# ]).predict(sample)print(output.shape, output.shape)# plot_images(output, 9, 5_Conv2D_BN_MP_1, str(output.shape))