云计算课程实验
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

54 lines
1.8 KiB

3 years ago
  1. import os
  2. import tensorflow as tf
  3. import matplotlib.pyplot as plt
  4. import numpy as np
  5. mnist = tf.keras.datasets.mnist
  6. (x_train, y_train),(x_test, y_test) = mnist.load_data(path="/data/data/mnist.npz") #加载mnist数据集
  7. #验证mnist数据集大小。x为数据,y为标签。mnist每张图的像素为28*28
  8. print(x_train.shape)
  9. print(y_train.shape)
  10. print(x_test.shape)
  11. print(y_test.shape)
  12. #打印训练集中前9张,看看是什么数字
  13. for i in range(9):
  14. plt.subplot(3,3,1+i)
  15. plt.imshow(x_train[i], cmap='gray')
  16. plt.show()
  17. #打印相应的标签
  18. print(y_train[:9])
  19. #基操:将像素标准化一下
  20. x_train, x_test = x_train / 255.0, x_test / 255.0
  21. #搭建一个两层神经网络
  22. model = tf.keras.models.Sequential([
  23. tf.keras.layers.Flatten(input_shape=(28, 28)), #拉伸图像成一维向量
  24. tf.keras.layers.Dense(128, activation='relu'), #第一层全连接+ReLU激活
  25. tf.keras.layers.Dropout(0.2), #dropout层
  26. tf.keras.layers.Dense(10, activation='softmax') #第二层全连接+softmax激活,输出预测标签
  27. ])
  28. #设置训练超参,优化器为sgd,损失函数为交叉熵,训练衡量指标为accuracy
  29. model.compile(optimizer='sgd', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
  30. #开始训练,训练5个epoch,一个epoch代表所有图像计算一遍。每一个epoch能观察到训练精度的提升
  31. model.fit(x_train, y_train, epochs=5)
  32. #计算训练了5个epoch的模型在测试集上的表现
  33. model.evaluate(x_test, y_test)
  34. #直观看一下模型预测结果,打印测试集中的前9张图像
  35. for i in range(9):
  36. plt.subplot(3,3,1+i)
  37. plt.imshow(x_test[i], cmap='gray')
  38. plt.show()
  39. #打印模型识别的数字,是否正确?
  40. np.argmax(model(x_test[:9]).numpy(), axis=1)
  41. #保存训练好的模型
  42. model.save("/data/output/model_epoch_5")