Keras: Classifying the MNIST dataset with a CNN
1. Loading and preprocessing the data
import numpy as np
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import *
from keras.optimizers import SGD, Adam
from keras.regularizers import l2
from keras.utils.vis_utils import plot_model
from matplotlib import pyplot as plt
import os
import tensorflow as tf

# Load the data
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Preprocessing
# Reshape (60000, 28, 28) to (-1, 28, 28, 1); the trailing 1 is the image depth (channels)
x_train = x_train.reshape(-1, 28, 28, 1) / 255.0
x_test = x_test.reshape(-1, 28, 28, 1) / 255.0
# Convert the labels to one-hot encoding
y_train = np_utils.to_categorical(y_train, num_classes=10)
y_test = np_utils.to_categorical(y_test, num_classes=10)
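As a quick sanity check (not part of the original post, just a minimal sketch using the variables defined above), the shapes of the preprocessed arrays and one sample digit can be inspected with numpy and matplotlib, which are already imported:

# Verify the preprocessed shapes (assumes the preprocessing code above has run)
print(x_train.shape, y_train.shape)   # expected: (60000, 28, 28, 1) (60000, 10)
print(x_test.shape, y_test.shape)     # expected: (10000, 28, 28, 1) (10000, 10)

# Display the first training digit together with its one-hot label
plt.imshow(x_train[0].reshape(28, 28), cmap='gray')
plt.title('one-hot label: ' + str(y_train[0]))
plt.show()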
2. Building the network and training

# Build the network
model = Sequential([
    # Convolutional layer to extract features, with L2 regularization on the kernels
    Conv2D(input_shape=(28, 28, 1), filters=32, kernel_size=5, strides=1,
           padding='same', activation='relu', kernel_regularizer=l2(0.01)),
    # Pooling layer to condense the features
    MaxPool2D(pool_size=(2, 2), strides=2, padding='same'),
    Flatten(),
    # Fully connected layer (its input size is inferred from the flattened feature map)
    Dense(units=128, bias_initializer='one', activation='tanh'),
    Dropout(0.2),
    Dense(units=10, bias_initializer='one', activation='softmax')
])

# Compile
# Define the optimizers
sgd = SGD(lr=0.1)
adam = Adam(lr=0.001)
# Use categorical cross-entropy as the loss
model.compile(optimizer=adam,
              loss='categorical_crossentropy',
              # Report accuracy during training
              metrics=['accuracy'])

model.fit(x_train, y_train, batch_size=32, epochs=10, validation_split=0.2)

# Evaluate the model
loss, acc = model.evaluate(x_test, y_test)
print('\ntest loss', loss)
print('test acc', acc)
out:
Epoch 1/10
32/48000 [..............................] - ETA: 10:18 - loss: 2.7563 - acc: 0.1562
96/48000 [..............................] - ETA: 3:53 - loss: 2.6141 - acc: 0.1354
......
......
Epoch 10/10
45952/48000 [===========================>..] - ETA: 0s - loss: 0.0664 - acc: 0.9905
47616/48000 [============================>.] - ETA: 0s - loss: 0.0663 - acc: 0.9908
48000/48000 [==============================] - 2s 37us/step - loss: 0.0663 - acc: 0.9910 - val_loss: 0.0149 - val_acc: 0.9884
32/10000 [..............................] - ETA: 4s
3360/10000 [=========>....................] - ETA: 0s
5824/10000 [================>.............] - ETA: 0s
8512/10000 [========================>.....] - ETA: 0s
10000/10000 [==============================] - 0s 20us/step
test loss 0.015059704356454312
test acc 0.988
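To visualize how training progressed and to check a few individual predictions, the following minimal sketch can be appended after the code above. It assumes the return value of model.fit is captured (e.g. history = model.fit(...)); the metric keys 'acc' and 'val_acc' match the log output shown above for this Keras version, and the plot_model call only reuses the import already present (it requires pydot and graphviz to be installed).

# Plot training vs. validation accuracy
# (assumption: history = model.fit(...) was used instead of a bare model.fit call)
plt.plot(history.history['acc'], label='train acc')
plt.plot(history.history['val_acc'], label='val acc')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()

# Compare predictions on the first few test digits with the true labels
pred = model.predict(x_test[:5])
print('predicted:', np.argmax(pred, axis=1))
print('true     :', np.argmax(y_test[:5], axis=1))

# Save a diagram of the network architecture using the plot_model import above
plot_model(model, to_file='cnn_mnist.png', show_shapes=True)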
Original post: https://www.cnblogs.com/wigginess/p/13062891.html