So, first of all, using GloVe embeddings in my model worked fine, but now I want to switch to ELMo and I keep getting this error:

ValueError: A Concatenate layer requires inputs with matching shapes except for the concat axis. Got inputs shapes: [(None, 20), (None, 20), (None, 20, 5)]

Can you help me? Here is my code snippet; let me know if you need more details.
import keras
from keras import backend as K
from keras.layers import Input, Embedding, Layer
from keras.models import Model
import tensorflow as tf
import tensorflow_hub as hub

pos_embedding_layer = Embedding(output_dim=pos_tag_embedding_size,
                                input_dim=len(SPACY_POS_TAGS),
                                input_length=sent_maxlen,
                                name='pos_embedding_layer')

inputs = [Input((sent_maxlen,), dtype='int32', name='word_inputs'),
          Input((sent_maxlen,), dtype='int32', name='predicate_inputs'),
          Input((sent_maxlen,), dtype='int32', name='postags_inputs')]

# ElmoEmbeddingLayer()
embedding_layer = ElmoEmbeddingLayer()

embeddings = [embedding_layer(inputs[0]),
              embedding_layer(inputs[1]),
              pos_embedding_layer(inputs[2])]

outputI = predict_layer(dropout(latent_layers(keras.layers.concatenate(embeddings))))
# error thrown here in outputI

## ------> 10] Build model
model = Model(inputs, outputI)
And this is the ElmoEmbeddingLayer class I am using:

class ElmoEmbeddingLayer(Layer):
    def __init__(self, **kwargs):
        self.dimensions = 1024
        self.trainable = True
        super(ElmoEmbeddingLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # Load the ELMo module from TF Hub and register its variables as trainable
        self.elmo = hub.Module('https://tfhub.dev/google/elmo/2', trainable=self.trainable,
                               name="{}_module".format(self.name))
        self.trainable_weights += K.tf.trainable_variables(scope="^{}_module/.*".format(self.name))
        super(ElmoEmbeddingLayer, self).build(input_shape)

    def call(self, x, mask=None):
        result = self.elmo(K.squeeze(K.cast(x, tf.string), axis=1),
                           as_dict=True,
                           signature='default',
                           )['default']
        return result

    def compute_mask(self, inputs, mask=None):
        return K.not_equal(inputs, '--PAD--')

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.dimensions)
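For reference, this is how I looked at the shapes that go into the concatenate call. It is only a quick sketch (the labels in the loop are just names I added for printing), but it shows the rank mismatch the error complains about:

    # Minimal sketch: print the static shapes Keras sees for each embedding
    # right before they are concatenated, to locate the mismatch.
    from keras import backend as K

    for name, tensor in zip(['word (ELMo)', 'predicate (ELMo)', 'postags'], embeddings):
        print(name, K.int_shape(tensor))

    # Judging from the error message, the two ELMo outputs come out 2D while
    # the POS embedding is 3D (batch, sent_maxlen, pos_tag_embedding_size),
    # and Concatenate refuses to merge tensors whose ranks differ.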