I am trying to merge two CNNs and then un-merge them again through an autoencoder, but I keep getting the same error message, "ValueError: total size of new array must be unchanged", and I am not sure what is wrong with my network.
I replaced the DSTM layers with LSTM and CNN layers, and that part works.
Thanks for your help!
from keras.layers import Input, Dense, Conv1D, MaxPooling1D, UpSampling1D, Embedding, Dropout, Flatten, Concatenate, LSTM, BatchNormalization, Reshape
from keras.models import Model
from keras import backend as K
from keras.callbacks import TensorBoard
input_address = Input(shape=(38,))
x_address = Reshape((38, 1))(input_address)
x_address = LSTM(125, activation="tanh", return_sequences=True)(x_address)
x_address = Dropout(0.4)(x_address)
x_address = LSTM(125, activation="tanh", return_sequences=True)(x_address)
x_address = Dropout(0.4)(x_address)
x_address = Dense(100, activation='relu')(x_address)
x_address = Conv1D(100, 2, activation='relu', padding='same')(x_address)  # (None, 38, 100)
x_address = MaxPooling1D(2, padding='same')(x_address)                    # (None, 19, 100)
x_address = Dropout(0.5)(x_address)
x_address = Flatten()(x_address)                                          # (None, 1900)
input_name = Input(shape=(29,))
x_name = Reshape((29, 1))(input_name)
x_name = LSTM(125, activation="tanh", return_sequences=True)(x_name)
x_name = Dropout(0.4)(x_name)
x_name = LSTM(125, activation="tanh", return_sequences=True)(x_name)
x_name = Dropout(0.4)(x_name)
x_name = Dense(100, activation='relu')(x_name)
x_name = Conv1D(100, 2, activation='relu', padding='same')(x_name)  # (None, 29, 100)
x_name = MaxPooling1D(2, padding='same')(x_name)                    # (None, 15, 100)
x_name = Dropout(0.5)(x_name)
x_name = Flatten()(x_name)                                          # (None, 1500)
merge = Concatenate(name="concat", axis=1)([x_name, x_address])  # (None, 1500 + 1900) = (None, 3400)
encoder = Dense(1, activation='relu')(merge)                     # (None, 1)
xd_address = Reshape((19, 100))(encoder)  # needs 1900 elements per sample -> raises the ValueError
xd_address = UpSampling1D(2)(xd_address)
xd_address = Conv1D(100, 2, activation='relu', padding='same')(xd_address)
xd_address = Dropout(0.4)(xd_address)
xd_address = LSTM(125, activation="tanh", return_sequences=True)(xd_address)
xd_address = Dropout(0.4)(xd_address)
xd_address = LSTM(125, activation="tanh", return_sequences=True)(xd_address)
xd_address = Flatten()(xd_address)
xd_name = Reshape((15, 100))(encoder)  # same issue: would need 1500 elements per sample
xd_name = UpSampling1D(2)(xd_name)
xd_name = Conv1D(100, 2, activation='relu', padding='same')(xd_name)
xd_name = Dropout(0.4)(xd_name)
xd_name = LSTM(125, activation="tanh", return_sequences=True)(xd_name)
xd_name = Dropout(0.4)(xd_name)
xd_name = LSTM(125, activation="tanh", return_sequences=True)(xd_name)
xd_name = Flatten()(xd_name)
autoencoder = Model(inputs=[input_name, input_address], outputs=[xd_name, xd_address])
autoencoder.compile(optimizer='adam', loss=['mse', 'mse'])
autoencoder.summary()
autoencoder.fit([name_train, address_train], [name_train, address_train]
,epochs=10
,batch_size=120
,shuffle=True
,validation_data=([name_test, address_test], [name_test, address_test])
,callbacks=[TensorBoard(log_dir='/tmp/autoencoder')])
Here is the error message I get:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-132-19e453d673b6> in <module>()
33
34
---> 35 xd_address = Reshape((19, 100))(encoder)
36 xd_address = UpSampling1D(2)(xd_address)
37 xd_address = Conv1D(100, 2, activation='relu', padding='same')(xd_address)
~\Anaconda3\lib\site-packages\keras\engine\base_layer.py in __call__(self, inputs, **kwargs)
472 if all([s is not None
473 for s in to_list(input_shape)]):
--> 474 output_shape = self.compute_output_shape(input_shape)
475 else:
476 if isinstance(input_shape, list):
~\Anaconda3\lib\site-packages\keras\layers\core.py in compute_output_shape(self, input_shape)
392 # input shape known? then we can compute the output shape
393 return (input_shape[0],) + self._fix_unknown_dimension(
--> 394 input_shape[1:], self.target_shape)
395
396 def call(self, inputs):
~\Anaconda3\lib\site-packages\keras\layers\core.py in _fix_unknown_dimension(self, input_shape, output_shape)
380 output_shape[unknown] = original // known
381 elif original != known:
--> 382 raise ValueError(msg)
383
384 return tuple(output_shape)
ValueError: total size of new array must be unchanged
Answer 0 (score: 0)
You are reshaping to a size that changes the number of elements in the original encoder tensor. Start with print(encoder); that will give you a hint about the actual shape going into the Reshape.
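In this model you can also read the shape straight off the code: encoder is the output of Dense(1), so it has shape (None, 1), while Reshape((19, 100)) needs 19 * 100 = 1900 elements per sample (and the later Reshape((15, 100)) would need 1500).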
Here is a short example of the rule: a tensor of shape (100,) cannot be reshaped to (2, 45), because 10 elements would have to be dropped, which is exactly what "total size of new array must be unchanged" means.
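To make that concrete, here is a minimal sketch. The NumPy call reproduces the element-count rule, and the Keras lines show one possible way to rewire the bottleneck so that each Reshape target is satisfiable; the bottleneck width of 32 is an arbitrary choice for illustration, not a value from the question.

import numpy as np
from keras.layers import Dense, Reshape

# The rule the error enforces: 100 elements cannot become 2 * 45 = 90.
try:
    np.arange(100).reshape(2, 45)
except ValueError as e:
    print(e)  # cannot reshape array of size 100 into shape (2,45)

# One possible fix: keep a small bottleneck, then project back up to
# exactly the element counts the Reshape layers expect.
encoder = Dense(32, activation='relu')(merge)              # (None, 32); 32 is arbitrary

xd_address = Dense(19 * 100, activation='relu')(encoder)   # (None, 1900)
xd_address = Reshape((19, 100))(xd_address)                # (None, 19, 100)

xd_name = Dense(15 * 100, activation='relu')(encoder)      # (None, 1500)
xd_name = Reshape((15, 100))(xd_name)                      # (None, 15, 100)

Note that even with matching Reshape sizes, each decoder branch ends in a Flatten over LSTM outputs (for example (None, 38, 125) flattens to (None, 4750)), which will not match the 38- and 29-wide targets passed to fit, so the final layers would need adjusting as well.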