用于定义模型的函数如下：
# Build the RNN language-model graph (TensorFlow 1.x variable-sharing style).
# NOTE(review): this fragment references names defined elsewhere in the
# enclosing method (`init`, `inputs`, `self.config`, `self.vocab`), so it is
# not runnable on its own.
with tf.variable_scope('RNNLM') as scope:
    # Initial hidden state: zeros of shape (batch_size, hidden_size).
    self.initial_state = tf.zeros(
        shape=(self.config.batch_size, self.config.hidden_size))
    # Create the model parameters once inside this scope.
    H = tf.get_variable('H', (self.config.hidden_size, self.config.hidden_size),
                        tf.float32, init)  # hidden-to-hidden weights
    I = tf.get_variable('I', (self.config.embed_size, self.config.hidden_size),
                        tf.float32, init)  # input(embedding)-to-hidden weights
    b_1 = tf.get_variable('bias-1', (1, self.config.hidden_size),
                          tf.float32, init)
    U = tf.get_variable('U', (self.config.hidden_size, len(self.vocab)),
                        tf.float32, init)  # hidden-to-vocab projection
    b_2 = tf.get_variable('bias-2', (1, len(self.vocab)),
                          tf.float32, init)

with tf.variable_scope('RNNLM') as scope:
    # Re-enter the same scope in sharing mode: get_variable now returns the
    # parameters created above instead of raising a "variable exists" error.
    scope.reuse_variables()
    self.current_state = self.initial_state
    H = tf.get_variable('H', (self.config.hidden_size, self.config.hidden_size),
                        tf.float32, init)
    I = tf.get_variable('I', (self.config.embed_size, self.config.hidden_size),
                        tf.float32, init)
    b_1 = tf.get_variable('bias-1', (1, self.config.hidden_size),
                          tf.float32, init)
    U = tf.get_variable('U', (self.config.hidden_size, len(self.vocab)),
                        tf.float32, init)
    b_2 = tf.get_variable('bias-2', (1, len(self.vocab)),
                          tf.float32, init)
    # Collect the hidden state at every time step.
    # NOTE(review): the original fragment appended to `rnn_outputs` without a
    # visible initialization — assumed to start empty here; confirm no earlier
    # contents are expected.
    rnn_outputs = []
    # Unroll the vanilla RNN: h_t = sigmoid(h_{t-1} H + x_t I + b_1).
    for t in xrange(self.config.num_steps):
        self.current_state = tf.sigmoid(
            tf.matmul(self.current_state, H)
            + tf.matmul(inputs[t], I)
            + b_1)
        rnn_outputs.append(self.current_state)
self.final_state = rnn_outputs[-1]

用于定义训练操作（基于损失 loss）的函数：
train_op = tf.train.AdamOptimizer(self.config.lr).minimize(loss)

运行时报错：ValueError: 变量 RNNLM/Variable_1/Adadelta/ 不存在，或者不是通过 tf.get_variable() 创建的。你是想在 VarScope 中设置 reuse=None 吗？
发布于 2017-06-27 14:48:02
您不小心将 train_op 的声明放进了作用域 RNNLM 中，而该作用域因调用了 scope.reuse_variables() 而处于“变量共享”模式。将 train_op 的声明移出该作用域，代码即可正常运行。
https://stackoverflow.com/questions/44782945
复制相似问题