TensorFlow-Example3.1.1

TensorFlow-Example3.1.1




Example3.1_1







In [1]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
In [2]:
plotdata = {'batchsize': [], 'loss': []}
def moving_average(a, w=10):
    """Smooth a sequence with a trailing window of width w.

    The first w entries are returned unchanged; from index w onward each
    entry is replaced by the mean of the w values immediately before it.
    If the sequence is shorter than w, a shallow copy is returned.
    """
    if len(a) < w:
        return a[:]
    smoothed = []
    for idx, val in enumerate(a):
        if idx < w:
            smoothed.append(val)
        else:
            smoothed.append(sum(a[idx - w:idx]) / w)
    return smoothed
In [3]:
# Synthetic training set: 100 evenly spaced x values in [-1, 1], with
# targets y = 2x plus Gaussian noise of standard deviation 0.3.
# NOTE(review): no random seed is set, so train_Y differs on every run.
train_X=np.linspace(-1,1,100)
train_Y=2*train_X+np.random.randn(*train_X.shape)*0.3

`np.random.randn(*train_X.shape)` is equivalent to `np.random.randn(100)`, since `train_X` has shape `(100,)`.

In [4]:
# Scatter plot of the noisy training data before fitting the model.
plt.plot(train_X,train_Y,'g+',label='Original data')
plt.legend()
plt.show()
In [5]:
# Placeholders for the model input (X) and target (Y); concrete values
# are supplied at run time through feed_dict.
X=tf.placeholder('float')
Y=tf.placeholder('float')

w=tf.Variable(tf.random_normal([1]),name='weight') # weight, initialized from a standard normal distribution; shape (1,)
b=tf.Variable(tf.zeros([1]),name='bias') # bias, initialized to zero; shape (1,)

z=tf.multiply(X,w)+b # linear model: z = w * x + b (elementwise multiply)
In [6]:
cost=tf.reduce_mean(tf.square(Y-z)) # mean squared error between targets and predictions
learning_rate=0.01 # learning rate: larger is faster but less precise; smaller is more precise but slower
optimizer=tf.train.GradientDescentOptimizer(learning_rate).minimize(cost) # gradient descent on the MSE
In [7]:
init=tf.global_variables_initializer() # op that initializes w and b when run in a session
traing_epochs=20 # number of passes over the training set
                 # NOTE(review): name has a typo ("traing" -> "training");
                 # kept unchanged because later cells reference it.
display_step=2 # report and record the loss every 2 epochs
In [13]:
with tf.Session() as sess:
    sess.run(init)
    plotdata = {'batchsize': [], 'loss': []}  # records (epoch, loss) pairs for the loss plot
    for epoch in range(traing_epochs):
        # One pass over the data, feeding a single (x, y) pair per step (SGD).
        for (x, y) in zip(train_X, train_Y):
            sess.run(optimizer, feed_dict={X: x, Y: y})

        # Periodically report and record the loss over the whole training set.
        if epoch % display_step == 0:
            loss = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
            print('Epoch:', epoch + 1, 'cost=', loss, 'w=', sess.run(w), 'b=', sess.run(b))
            # Fix: the original guard `not (loss == 'NA')` compared a float to
            # the string 'NA' and was therefore always True, so a NaN loss
            # (diverged training) would have been recorded anyway. Check for
            # NaN explicitly instead.
            if not np.isnan(loss):
                plotdata['batchsize'].append(epoch)
                plotdata['loss'].append(loss)

    print('Finished!')
    print('cost=', sess.run(cost, feed_dict={X: train_X, Y: train_Y}), 'w=', sess.run(w), 'b=', sess.run(b))

    # Plot the training data together with the fitted line y = w*x + b.
    plt.plot(train_X, train_Y, 'g+', label='Original data')
    plt.plot(train_X, sess.run(w) * train_X + sess.run(b), 'bx', label='Fittedline')
    plt.legend()
    plt.show()

    # Plot the moving-average of the recorded losses against the epoch index.
    plotdata['avgloss'] = moving_average(plotdata['loss'])
    plt.figure(1)
    plt.subplot(211)
    plt.plot(plotdata['batchsize'], plotdata['avgloss'], 'b--')
    plt.xlabel('Minibatch number')
    plt.ylabel('Loss')
    plt.title('Minibatch run vs. Training loss')
    plt.show()

    # Use the trained model to predict for a single new input x = 0.2.
    print('x=0.2,z=', sess.run(z, feed_dict={X: 0.2}))
Epoch: 1 cost= 0.28066245 w= [1.2828385] b= [0.2125626]
Epoch: 3 cost= 0.10059701 w= [1.7871394] b= [0.07881312]
Epoch: 5 cost= 0.08707939 w= [1.9240385] b= [0.02728474]
Epoch: 7 cost= 0.08652211 w= [1.9595456] b= [0.01367597]
Epoch: 9 cost= 0.08657783 w= [1.9687283] b= [0.01015242]
Epoch: 11 cost= 0.08660562 w= [1.971103] b= [0.00924118]
Epoch: 13 cost= 0.0866137 w= [1.9717162] b= [0.00900584]
Epoch: 15 cost= 0.08661586 w= [1.9718752] b= [0.00894481]
Epoch: 17 cost= 0.08661642 w= [1.9719166] b= [0.00892895]
Epoch: 19 cost= 0.08661655 w= [1.9719274] b= [0.00892483]
Finished!
cost= 0.08661659 w= [1.9719291] b= [0.00892424]
x=0.2,z= [0.40331006]

Leave a comment

Your email address will not be published. Required fields are marked *