在一个函数中定义 Model(模型定义代码如下):
# Declaration pass: enter the 'RNNLM' scope once WITHOUT reuse so that
# tf.get_variable() creates the model parameters. A later re-entry of the
# same scope with reuse_variables() can then fetch these shared weights.
# NOTE(review): `init` is an initializer presumably defined earlier in the
# enclosing method — confirm against the full file.
with tf.variable_scope('RNNLM') as scope:
    # Zero initial hidden state, shape (batch_size, hidden_size).
    self.initial_state = tf.zeros(shape=(self.config.batch_size, self.config.hidden_size))
    # H: hidden-to-hidden recurrence weights.
    H = tf.get_variable('H', (self.config.hidden_size, self.config.hidden_size), tf.float32, init)
    # I: embedding-to-hidden input projection.
    I = tf.get_variable('I', (self.config.embed_size, self.config.hidden_size), tf.float32, init)
    # b_1: hidden-layer bias (broadcast over the batch).
    b_1 = tf.get_variable('bias-1', (1, self.config.hidden_size), tf.float32, init)
    # U: hidden-to-vocabulary output projection.
    U = tf.get_variable('U', (self.config.hidden_size, len(self.vocab)), tf.float32, init)
    # b_2: output-layer (softmax) bias.
    b_2 = tf.get_variable('bias-2', (1, len(self.vocab)), tf.float32, init)
# Reuse pass: re-enter the same 'RNNLM' scope in variable-sharing mode so
# tf.get_variable() returns the parameters created in the declaration pass
# instead of allocating new ones.
# NOTE(review): while reuse is on, any attempt to CREATE a new variable
# (e.g. an optimizer's slot variables) raises
# "ValueError: Variable ... does not exist" — keep optimizer construction
# outside this scope.
with tf.variable_scope('RNNLM') as scope:
    scope.reuse_variables()
    self.current_state = self.initial_state
    # Fetch the shared parameters declared in the first 'RNNLM' scope.
    H = tf.get_variable('H', (self.config.hidden_size, self.config.hidden_size), tf.float32, init)
    I = tf.get_variable('I', (self.config.embed_size, self.config.hidden_size), tf.float32, init)
    b_1 = tf.get_variable('bias-1', (1, self.config.hidden_size), tf.float32, init)
    U = tf.get_variable('U', (self.config.hidden_size, len(self.vocab)), tf.float32, init)
    b_2 = tf.get_variable('bias-2', (1, len(self.vocab)), tf.float32, init)
    # Unroll the recurrence for num_steps time steps:
    #   h_t = sigmoid(h_{t-1} @ H + x_t @ I + b_1)
    # assumes inputs[t] is (batch_size, embed_size) and rnn_outputs is a
    # list initialized outside this snippet — TODO confirm against full file.
    for t in xrange(self.config.num_steps):
        self.current_state = tf.sigmoid(tf.matmul(self.current_state, H) + tf.matmul(inputs[t], I) + b_1)
        rnn_outputs.append(self.current_state)
    # Final hidden state = output of the last unrolled step.
    self.final_state = rnn_outputs[-1]
定义训练操作(training op)的函数中,基于 loss 构建优化器:
# Build the training op: Adam with the configured learning rate.
# (Must be constructed OUTSIDE any variable scope with reuse enabled,
# because minimize() creates new slot variables via tf.get_variable.)
optimizer = tf.train.AdamOptimizer(self.config.lr)
train_op = optimizer.minimize(loss)
ValueError: Variable RNNLM/RNNLM/RNNLM/Variable_1/Adadelta/ does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?
答案(得分:2):
您不小心把 train_op 的声明放进了因调用 scope.reuse_variables() 而处于"变量共享"模式的 RNNLM 作用域中。优化器在 minimize() 时会用 tf.get_variable() 创建新的 slot 变量,而共享模式下不允许创建新变量,因此报错。把 train_op 的构建移出该作用域,代码即可正常运行。