How should I pass the inputs to the concatenate layer so that, when the result is fed to the TimeDistributed layer, the output shape is (None, 1, 1536) rather than (None, 32, 1536)?
from tensorflow.keras.layers import Input, Lambda, LSTM, Dense, TimeDistributed, concatenate
from tensorflow.keras.models import Model

input_shape = (32, 32, 32, 4)
cnn_count = 5

def create_cnn_lstm_model():
    # shared-weight CNN that encodes a volume (defined elsewhere)
    model = create_shared_weight_cnn()

    # create one Input per volume, registered as globals 'input_0' ... 'input_4'
    for i in range(cnn_count):
        temp_name_input = 'input_' + str(i)
        globals()[temp_name_input] = Input(shape=input_shape)

    # collect the five Input tensors into a single list
    combined_input = []
    for i in range(cnn_count):
        temp_name_input = 'input_' + str(i)
        combined_input.append(globals()[temp_name_input])

    # concatenate the inputs and run the shared CNN via TimeDistributed
    seq = concatenate(combined_input)
    out = TimeDistributed(Lambda(lambda x: model(combined_input)))(seq)
    out = LSTM(512)(out)
    out = Dense(4, activation='softmax')(out)

    cnn_lstm = Model(inputs=combined_input, outputs=out)
    return cnn_lstm
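
For reference, below is a minimal self-contained sketch of the shapes being asked about. It assumes the shared CNN takes the concatenated volumes of shape (None, 32, 32, 32, 20) and returns a single 1536-dimensional feature vector; the small Conv3D stand-in network and the Reshape that adds the length-1 time axis are illustrative assumptions, not the actual create_shared_weight_cnn() or a confirmed fix.

import tensorflow as tf
from tensorflow.keras.layers import (Input, Conv3D, GlobalAveragePooling3D, Dense,
                                     Reshape, LSTM, concatenate)
from tensorflow.keras.models import Model, Sequential

input_shape = (32, 32, 32, 4)
cnn_count = 5

# stand-in for create_shared_weight_cnn(): maps the concatenated volumes
# (None, 32, 32, 32, 20) to a 1536-dim feature vector
shared_cnn = Sequential([
    Conv3D(16, 3, activation='relu', input_shape=(32, 32, 32, 4 * cnn_count)),
    GlobalAveragePooling3D(),
    Dense(1536),
])

inputs = [Input(shape=input_shape) for _ in range(cnn_count)]
combined = concatenate(inputs)           # (None, 32, 32, 32, 20)
features = shared_cnn(combined)          # (None, 1536)
features = Reshape((1, 1536))(features)  # (None, 1, 1536) -- length-1 time axis
out = LSTM(512)(features)
out = Dense(4, activation='softmax')(out)

cnn_lstm = Model(inputs=inputs, outputs=out)
cnn_lstm.summary()

Note that this sketch drops TimeDistributed entirely and uses Reshape to create the length-1 sequence; whether that matches the intended use of the shared CNN is an open assumption.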