tensorflow系列教程之四:一元线性回归

本节主要实现一个简单的一元线性回归,这个例子相当于tensorflow的helloworld

In [6]:
# Simple univariate linear regression — the TensorFlow 1.x "hello world".
import tensorflow as tf
import numpy as np

# Ground-truth line y = TRUE_W * x + TRUE_B used to synthesize training data.
N_SAMPLES = 10
TRUE_W = 0.5
TRUE_B = 8.0

# Draw N_SAMPLES random inputs in [0, 1) and compute their noiseless targets.
# (The original comment claimed 1000 samples, but only 10 were ever drawn.)
x_data = np.random.rand(N_SAMPLES)
y_data = x_data * TRUE_W + TRUE_B

# Placeholders for the inputs/targets fed in at every training step.
x = tf.placeholder(dtype=tf.float32)
y = tf.placeholder(dtype=tf.float32)

# Trainable slope and intercept, both initialized to 0.
w = tf.Variable(0., dtype=tf.float32)
b = tf.Variable(0., dtype=tf.float32)

# Linear model prediction.
model = w * x + b

# Mean-squared-error loss between predictions and targets.
loss = tf.reduce_mean(tf.square(model - y))

# Plain gradient descent with a fixed learning rate of 0.1.
optimizer = tf.train.GradientDescentOptimizer(0.1)

# One training op: a single gradient-descent step on the loss.
train = optimizer.minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(1000):
        sess.run(train, feed_dict={x: x_data, y: y_data})
        # Report the learned parameters every 20 steps; they should
        # converge toward (TRUE_W, TRUE_B) = (0.5, 8.0).
        if step % 20 == 0:
            print(sess.run([w, b]))
[0.60954803, 1.6365861]
[2.0641718, 7.3340487]
[1.5782999, 7.5643845]
[1.2378199, 7.7020593]
[1.0048186, 7.7961483]
[0.84539807, 7.860524]
[0.7363223, 7.90457]
[0.66169244, 7.9347067]
[0.6106302, 7.955326]
[0.57569355, 7.969434]
[0.55178964, 7.9790864]
[0.5354345, 7.985691]
[0.5242443, 7.9902096]
[0.51658785, 7.9933014]
[0.51134944, 7.9954166]
[0.5077653, 7.996864]
[0.5053129, 7.997854]
[0.5036349, 7.998532]
[0.5024864, 7.998996]
[0.50170094, 7.999313]
[0.5011638, 7.99953]
[0.5007962, 7.999678]
[0.5005446, 7.9997797]
[0.50037265, 7.999849]
[0.5002551, 7.9998965]
[0.50017464, 7.999929]
[0.5001198, 7.999951]
[0.50008184, 7.999966]
[0.5000564, 7.9999766]
[0.5000384, 7.9999847]
[0.50002605, 7.999989]
[0.50001806, 7.9999924]
[0.50001276, 7.999994]
[0.5000093, 7.999995]
[0.50000703, 7.999996]
[0.50000584, 7.9999967]
[0.5000047, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]
[0.50000423, 7.999997]