I have tried several ways of writing summaries in a distributed TensorFlow setup, but all of them failed. My latest attempt fails with:
InvalidArgumentError: You must feed a value for placeholder tensor 'Placeholder' with dtype float
Here is my code:
if arguments.job_name == "ps":
    # parameter server: just block and serve the variables
    server.join()
elif arguments.job_name == "worker":
    is_chief = (arguments.task_index == 0)
    with tf.device(tf.train.replica_device_setter(
            worker_device="/job:worker/task:%d" % arguments.task_index,
            cluster=cluster)):
        # simple linear model: p_y = x * w + b
        x = tf.placeholder(tf.float32, shape=(None, feature_num), name="input_x")
        y = tf.placeholder(tf.float32, shape=(None, 1), name="input_y")
        w = tf.get_variable("weight", (feature_num, 1), initializer=tf.random_normal_initializer())
        b = tf.get_variable("bais", (1, 1), initializer=tf.constant_initializer(0.0))
        p_y = tf.add(tf.matmul(x, w), b)
        loss = tf.reduce_sum(tf.square(y - p_y), name="loss")

        global_step = tf.contrib.framework.get_or_create_global_step()
        opt = tf.train.GradientDescentOptimizer(learning_rate=0.0000001)
        training_op = opt.minimize(loss, global_step=global_step)

        # record the loss as a scalar summary and save the merged summary
        # every second through a SummarySaverHook
        tf.summary.scalar("loss", loss)
        summary_op = tf.summary.merge_all()
        summary_hook = tf.train.SummarySaverHook(save_secs=1, output_dir="./summary", summary_op=summary_op)

        with tf.train.MonitoredTrainingSession(master=server.target,
                                               is_chief=is_chief,
                                               hooks=[summary_hook],
                                               save_summaries_secs=None,
                                               save_summaries_steps=None,
                                               checkpoint_dir="./model",
                                               save_checkpoint_secs=5) as mon_sess:
            for (x_data, y_data) in dataSet:
                if not mon_sess.should_stop():
                    print("============training...=====================")
                    mon_sess.run(training_op, feed_dict={x: x_data, y: y_data})
                    mon_sess.run(summary_op, feed_dict={x: x_data, y: y_data})
    print("finished!!!!")
Can anyone help with this?