Resource Description
MNIST handwritten digit recognition with a TensorFlow multi-layer neural network (dataset + source code).rar

Code Snippet and File Information
#!/usr/bin/env python
# coding: utf-8
# @author ZwwIot

# In[17]:
import tensorflow as tf
# Import the MNIST dataset
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/data/", one_hot=True)
# In[18]:
# Training parameters
learning_rate = 0.001
training_epochs = 25
batch_size = 100
display_step = 1
# Network parameters
n_hidden_1 = 256  # number of neurons in hidden layer 1
n_hidden_2 = 256  # number of neurons in hidden layer 2
n_input = 784     # MNIST data input (img shape: 28*28)
n_classes = 10    # MNIST classes (digits 0-9, 10 classes in total)
model_path = "log/520model.ckpt"  # checkpoint path for saving the model

# tf Graph input
x = tf.placeholder("float", [None, n_input])
y = tf.placeholder("float", [None, n_classes])
# In[19]:
# Create model
def multilayer_perceptron(x, weights, biases):
    # Hidden layer with ReLU activation
    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
    layer_1 = tf.nn.relu(layer_1)
    # Hidden layer with ReLU activation
    layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])
    layer_2 = tf.nn.relu(layer_2)
    # Output layer with linear activation
    out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
    return out_layer
# In[20]:
# Store layers' weights & biases
weights = {
    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_classes]))
}
# Saver for checkpoints (must be created after the variables are defined,
# otherwise tf.train.Saver() raises "No variables to save")
saver = tf.train.Saver()
# In[21]:
# Build the model
pred = multilayer_perceptron(x, weights, biases)
# Define loss and optimizer
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# Initialize the variables
init = tf.global_variables_initializer()
# In[25]:
# Launch the session
with tf.Session() as sess:
    sess.run(init)
    # Training loop
    for epoch in range(training_epochs):
        avg_cost = 0.
        total_batch = int(mnist.train.num_examples / batch_size)  # batches per epoch
        # Loop over the whole training set
        for i in range(total_batch):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            # Run optimization op (backprop) and cost op (to get loss value)
            _, c = sess.run([optimizer, cost], feed_dict={x: batch_xs, y: batch_ys})
            # Accumulate the running average of the loss over the epoch
            avg_cost += c / total_batch
            # print("I:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
        # Display training progress
        if (epoch + 1) % display_step == 0:
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
    print("Finished!")
    # Test the model
    correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
    # Compute the accuracy
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    print("Accuracy:", accuracy.eval({x: mnist.test.images, y: mnist.test.labels}))
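
Note that the script creates `saver` and `model_path` but never writes a checkpoint. The following is a minimal sketch (an assumption, not part of the original archive) of how that Saver could persist the trained weights and restore them later:

# Sketch (assumption, not in the original script):
# run inside the training session, after the training loop finishes
save_path = saver.save(sess, model_path)
print("Model saved in file:", save_path)

# later, restore the checkpoint in a fresh session for inference
with tf.Session() as sess:
    saver.restore(sess, model_path)
    print("Restored accuracy:",
          accuracy.eval({x: mnist.test.images, y: mnist.test.labels}))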
 Attribute        Size  Date        Time   Name
----------- ---------  ----------  -----  ----
      File       4248  2018-12-23  23:55  MNIST多層分類.py
      File    7840016  2016-11-02  19:39  MNIST DATA\MNIST DATA\test-images
      File      10008  2016-11-02  19:39  MNIST DATA\MNIST DATA\test-labels
      File   47040016  2016-11-02  19:39  MNIST DATA\MNIST DATA\train-images
      File      60008  2016-11-02  19:39  MNIST DATA\MNIST DATA\train-labels
 Directory          0  2018-12-23  23:56  MNIST DATA\MNIST DATA
 Directory          0  2018-12-23  23:56  MNIST DATA
----------- ---------  ----------  -----  ----
             54954296                     7