我知道问题已经以某种方式发布,但是找不到我问题的答案。
我试图从tensorflow的Bi-directionnalLSTM层上添加'Attention'层,但收到以下错误消息:添加的层必须是Layer类的实例。找到:
这是我的代码:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM,Dense,Dropout,GRU,BatchNormalization,Activation,Bidirectional,TimeDistributed,RepeatVector
from tensorflow.keras.layers import Attention
def create_attention(units,n_steps_in,n_features,dr):
model = Sequential()
lstm = LSTM(units, activation='sigmoid',return_sequences=True,
input_shape=(n_steps_in, n_features),
dropout = dr)
model.add(Bidirectional(lstm, input_shape=(n_steps_in, n_features)))
model.add(Attention)
model.add(TimeDistributed(Dense(1, activation='sigmoid')))
return model
这是完整的错误消息:
TypeError Traceback (most recent call last)
<ipython-input-107-de8d4616e3c7> in <module>
74 #model = create_lstm(u ,n_steps_in,n_features,name,name)
75 #model = create_bidirectional(u,n_steps_in,n_features,name)
---> 76 model = create_attention(u,n_steps_in,n_features,name)
77 model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['Precision','Recall',fbeta])
78 mode = scrng[s]['mode']
<ipython-input-106-c7dd103a8ded> in create_attention(units, n_steps_in, n_features, dr)
84 model.add(Bidirectional(lstm, input_shape=(n_steps_in, n_features)))
85
---> 86 model.add(Attention)
87 model.add(TimeDistributed(Dense(1, activation='sigmoid')))
88 return model
~/.local/share/virtualenvs/predictionpa-SrVGL0Nv/lib/python3.7/site-packages/tensorflow_core/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
455 self._self_setattr_tracking = False # pylint: disable=protected-access
456 try:
--> 457 result = method(self, *args, **kwargs)
458 finally:
459 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
~/.local/share/virtualenvs/predictionpa-SrVGL0Nv/lib/python3.7/site-packages/tensorflow_core/python/keras/engine/sequential.py in add(self, layer)
159 raise TypeError('The added layer must be '
160 'an instance of class Layer. '
--> 161 'Found: ' + str(layer))
162
163 tf_utils.assert_no_legacy_layers([layer])
TypeError: The added layer must be an instance of class Layer. Found: <class 'tensorflow.python.keras.layers.dense_attention.Attention'>