简单神经网络实现Mnist数据集的训练

import numpy as np
import tensorflow as tf
import matplotlib.pyplot  as plt
from tensorflow.examples.tutorials.mnist import input_data 


# In[37]:


# Load MNIST from the local 'tang/' directory (downloaded automatically if the
# four data files are absent); one_hot=True encodes each digit label as a
# 10-dimensional one-hot vector, matching the softmax output below.
mnist = input_data.read_data_sets('tang/',one_hot= True)


# In[22]:


# Network architecture: 784 -> 256 -> 128 -> 10 fully-connected MLP.
n_hidden_1=256  # units in the first hidden layer
n_hidden_2=128  # units in the second hidden layer
n_input   =784  # flattened 28x28 MNIST image
n_classes =10   # digits 0-9

# Feed placeholders: rows are samples (None = variable batch size).
x=tf.placeholder("float",[None,n_input])
y=tf.placeholder("float",[None,n_classes])

# Small-stddev Gaussian init keeps the sigmoid units out of saturation at start.
stddev=0.1
weights={
    'w1':tf.Variable(tf.random_normal([n_input,n_hidden_1],stddev=stddev)),
    'w2':tf.Variable(tf.random_normal([n_hidden_1,n_hidden_2],stddev=stddev)),
    'out':tf.Variable(tf.random_normal([n_hidden_2,n_classes],stddev=stddev))
    
}
# NOTE(review): biases use the default stddev=1.0 normal init — zeros are the
# usual choice; kept as-is to preserve behavior.
biases={
    'b1':tf.Variable(tf.random_normal([n_hidden_1])),
    'b2':tf.Variable(tf.random_normal([n_hidden_2])),
    'out':tf.Variable(tf.random_normal([n_classes]))
}
print("NETWORK READY")


# In[28]:


def multilayer_perceptron (_X,_weights,_biases):
    """Forward pass of a two-hidden-layer MLP.

    Hidden layers use sigmoid activations; the output layer is linear and
    returns raw logits (softmax is applied later inside the loss).
    """
    hidden1 = tf.sigmoid(tf.matmul(_X, _weights['w1']) + _biases['b1'])
    hidden2 = tf.sigmoid(tf.matmul(hidden1, _weights['w2']) + _biases['b2'])
    logits = tf.add(tf.matmul(hidden2, _weights['out']), _biases['out'])
    return logits


# In[30]:


pred=multilayer_perceptron(x,weights,biases)

# Mean (not sum) cross-entropy over the batch: reduce_sum made the loss scale
# linearly with batch_size, silently coupling the effective learning rate to
# the batch size and inflating the printed cost. With reduce_mean the reported
# cost is per-example; learning_rate is raised 0.001 -> 0.1 so the per-step
# update with batch_size=100 matches the original magnitude.
cost=tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred,labels=y))
optm=tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost)
# Accuracy: fraction of samples whose predicted class (argmax of logits)
# matches the one-hot label's class.
corr=tf.equal(tf.argmax(pred,1),tf.argmax(y,1))
accr=tf.reduce_mean(tf.cast(corr,"float"))

init=tf.global_variables_initializer()
print("FUNCTIONS READY")


# In[38]:


training_epochs=100  # full passes over the training set
batch_size      =100  # samples per SGD step
display_step    =4    # report metrics every 4 epochs

# Context manager guarantees the session (and its GPU/CPU resources) is
# released even if training raises — the original leaked an unclosed Session.
with tf.Session() as sess:
    sess.run(init)

    for epoch in range(training_epochs):
        avg_cost=0.
        total_batch=int(mnist.train.num_examples/batch_size)

        # One epoch: iterate the training set in mini-batches.
        for i in range(total_batch):
            batch_xs,batch_ys=mnist.train.next_batch(batch_size)
            feeds={x:batch_xs,y:batch_ys}
            sess.run(optm,feed_dict=feeds)
            avg_cost+=sess.run(cost,feed_dict=feeds)
        avg_cost=avg_cost/total_batch

        if(epoch+1)%display_step ==0:
            # Print epoch+1 so the displayed number matches the 1-based count
            # the trigger condition uses (the original printed `epoch`,
            # off by one: e.g. "Epoch:003/100" on the 4th epoch).
            print ("Epoch:%03d/%03d cost:%.9f"% (epoch+1,training_epochs,avg_cost))
            feeds={x:batch_xs,y:batch_ys}  # accuracy on the last mini-batch only
            train_acc=sess.run(accr,feed_dict=feeds)
            print("TRAIN ACCURACY: %.3f"%  (train_acc))
            feeds={x:mnist.test.images,y:mnist.test.labels}
            test_acc=sess.run(accr,feed_dict=feeds)
            print("TEST ACCURACY:%.3f"%  (test_acc))
    print  ("OPTIMIZATION FINISHED")
    

这个 MNIST 数据集可以直接在网上下载。下载后将四个数据文件放在同一个文件夹里(我这里将该文件夹命名为 tang),并把程序和该文件夹放在同一路径下,即可直接读取数据集;也可以在代码中自行指定完整路径。

from tensorflow.examples.tutorials.mnist import input_data 

这条语句会在本地找不到数据时自动从 TensorFlow 官网下载 MNIST 数据集,但下载往往不稳定,很可能失败,建议提前手动准备好数据文件。

我会将这个数据集发出来,方便大家下载!

你可能感兴趣的:(简单神经网络实现Mnist数据集的训练)