tensorflow v1.0 中的 embedding_rnn_seq2seq

时间:2017-04-17 11:12:20

标签: tensorflow

我正在使用tensorflow v 1.0 这是代码: -

 from tensorflow.python.framework import dtypes
        from tensorflow.python.framework import ops
        from tensorflow.python.ops import array_ops
        from tensorflow.python.ops import control_flow_ops
        from tensorflow.python.ops import embedding_ops
        from tensorflow.python.ops import math_ops
        from tensorflow.python.ops import nn_ops
        from tensorflow.python.ops import rnn
        from tensorflow.python.ops import variable_scope
        import tensorflow as tf
        with variable_scope.variable_scope(scope or "embedding_rnn_seq2seq"):
        # Encoder.
        encoder_cell = tf.contrib.rnn.EmbeddingWrapper(
            cell, embedding_classes=num_encoder_symbols,
            embedding_size=embedding_size)
        _, encoder_state = rnn.rnn(encoder_cell, encoder_inputs, dtype=dtype)
        # Decoder.
        if output_projection is None:
          cell = tf.contrib.rnn.OutputProjectionWrapper(cell, num_decoder_symbols)

运行时我收到了如下错误。下面是完整的错误信息：

Traceback (most recent call last):
File "neural_conversation_model.py", line 323, in <module>
tf.app.run()
File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/platform/app.py", line 44, in run
_sys.exit(main(_sys.argv[:1] + flags_passthrough))
File "neural_conversation_model.py", line 320, in main
train()
File "neural_conversation_model.py", line 155, in train
model = create_model(sess, False,beam_search=beam_search, beam_size=beam_size, attention=attention)
File "neural_conversation_model.py", line 104, in create_model
forward_only=forward_only, beam_search=beam_search, beam_size=beam_size, attention=attention)
File "/home/pratik/Desktop/Neural_Conversation_Models-master/seq2seq_model.py", line 159, in __init__
softmax_loss_function=softmax_loss_function)
File "/home/pratik/Desktop/Neural_Conversation_Models-master/my_seq2seq.py", line 975, in model_with_buckets
decoder_inputs[:bucket[1]])
File "/home/pratik/Desktop/Neural_Conversation_Models-master/seq2seq_model.py", line 158, in <lambda>
lambda x, y: seq2seq_f(x, y, False),
File "/home/pratik/Desktop/Neural_Conversation_Models-master/seq2seq_model.py", line 101, in seq2seq_f
beam_size=beam_size )
File "/home/pratik/Desktop/Neural_Conversation_Models-master/my_seq2seq.py", line 817, in embedding_attention_seq2seq
encoder_outputs, encoder_state = tf.nn.rnn(

AttributeError: 'module' 对象没有属性 'rnn'

由于 Pratik Goyal

0 个答案:

没有答案