Experiment Results
import tensorflow as tf

def read_data():
    print("read data ...")
    return tf.constant(value=[1.0, 2.0, 3.0], dtype=tf.float32)

X = read_data()
X_train = tf.placeholder(dtype=tf.float32)

with tf.Session() as sess:
    for epoch in range(3):
        for batch in range(3):
            x = sess.run(X)
            print(sess.run(X_train, feed_dict={X_train: x}))
read data ...
[1. 2. 3.]
[1. 2. 3.]
[1. 2. 3.]
[1. 2. 3.]
[1. 2. 3.]
[1. 2. 3.]
[1. 2. 3.]
[1. 2. 3.]
[1. 2. 3.]
Although the loop body calls sess.run(X) nine times, read_data is actually invoked only once.
Really amazing!
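To see why, it helps to look at what read_data actually returns. A minimal sketch (assuming TensorFlow 1.x): the function runs as ordinary Python exactly once, and its return value is just a symbolic handle to a node in the default graph; the numbers only materialize when sess.run executes that node.

import tensorflow as tf

X = tf.constant(value=[1.0, 2.0, 3.0], dtype=tf.float32)

# X is a handle to a graph node, not the numbers themselves:
print(X)        # Tensor("Const:0", shape=(3,), dtype=float32)
print(type(X))  # <class 'tensorflow.python.framework.ops.Tensor'>

with tf.Session() as sess:
    print(sess.run(X))  # the graph executes only here: [1. 2. 3.]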
Switching to a random op makes the effect even more striking:
def read_data():
    print("read data ...")
    return tf.random_uniform(shape=(3,), maxval=1.0)
read data ...
[0.6201165 0.70080805 0.04186273]
[0.04440641 0.27251375 0.35242593]
[0.58647656 0.6420467 0.47325552]
[0.4188739 0.9272245 0.3594923]
[0.49990058 0.930122 0.30738378]
[0.6659864 0.48257875 0.2847129 ]
[0.64794695 0.0111196 0.00765169]
[0.9654126 0.05055571 0.20485735]
[0.7216649 0.483734 0.49638057]
The function is called only once, yet the random values are different on every single run...
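This is consistent with the graph model: read_data runs once and adds a single tf.random_uniform op to the graph, and every sess.run re-executes that op, drawing a fresh sample each time. A minimal sketch isolating the behavior (TensorFlow 1.x assumed):

import tensorflow as tf

rand = tf.random_uniform(shape=(3,), maxval=1.0)  # one op node in the graph

with tf.Session() as sess:
    a = sess.run(rand)  # executes the sampling op
    b = sess.run(rand)  # executes the same op again -> a fresh sample
    print(a)
    print(b)            # almost certainly different from a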
Let's keep experimenting:
import tensorflow as tf

# def read_data():
#     print("read data ...")
#     return tf.constant(value=[1.0, 2.0, 3.0], dtype=tf.float32)

def hello():
    print("hello")

def read_data():
    print("read data ...")
    print("loading 10 GB of data")
    print("[1,2,3,4,...,99999]")
    hello()
    return tf.random_uniform(shape=(3,), maxval=1.0)

X = read_data()
X_train = tf.placeholder(dtype=tf.float32)

# with tf.Session() as sess:
#     for epoch in range(3):
#         for batch in range(3):
#             x = sess.run(X)
#             print(sess.run(X_train, feed_dict={X_train: x}))
read data ...
loading 10 GB of data
[1,2,3,4,...,99999]
hello
Only TensorFlow's own operations are deferred until sess.run; every other (plain Python) statement executes up front, at graph-construction time!
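A practical consequence: if you want the heavy Python-side loading to run at sess.run time (e.g. once per batch) rather than once at graph construction, the Python code has to be wrapped as a graph op. A minimal sketch using tf.py_func (TensorFlow 1.x; read_data_py is a hypothetical loader standing in for the real one):

import numpy as np
import tensorflow as tf

def read_data_py():
    # plain Python, but wrapped as a graph op below, so it runs on every sess.run
    print("read data ...")
    return np.random.uniform(size=(3,)).astype(np.float32)

X = tf.py_func(read_data_py, [], tf.float32)

with tf.Session() as sess:
    for _ in range(3):
        print(sess.run(X))  # "read data ..." is printed on each run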