我正在尝试在网络中使用自定义的RNN单元。我从Keras文档中的RNN单元示例入手,其中自定义单元被定义为MinimalRNNCell。当我把之前使用的SimpleRNN替换为这个自定义RNN单元、并在循环网络中使用它时,出现了如下错误:
ValueError: An operation has `None` for gradient.(某个操作的梯度为None。)请确保您的所有操作都定义了梯度(即可微)。常见的无梯度操作包括:K.argmax、K.round、K.eval。
class MinimalRNNCell(Layer):
    """A minimal recurrent cell: output_t = x_t . W + output_{t-1} . U.

    The cell keeps a single state tensor (its previous output) and applies
    no activation — it is the bare-bones example cell for `keras.layers.RNN`.
    """

    def __init__(self, units, **kwargs):
        # `units` is both the output width and the (single) state width.
        self.units = units
        self.state_size = units
        super(MinimalRNNCell, self).__init__(**kwargs)

    def build(self, input_shape):
        # Input-to-hidden and hidden-to-hidden weight matrices.
        input_dim = input_shape[-1]
        self.kernel = self.add_weight(
            shape=(input_dim, self.units),
            initializer='uniform',
            name='kernel')
        self.recurrent_kernel = self.add_weight(
            shape=(self.units, self.units),
            initializer='uniform',
            name='recurrent_kernel')
        self.built = True

    def call(self, inputs, states):
        # The single state entry is the output from the previous timestep.
        previous_output = states[0]
        projected_input = K.dot(inputs, self.kernel)
        new_output = projected_input + K.dot(previous_output, self.recurrent_kernel)
        # RNN cells return (output, list_of_new_states).
        return new_output, [new_output]
# --- configuration (must come before any use of n_f / length) ---
# BUG FIX: the original script called keras.Input((None, n_f)) before n_f
# was defined, which raises NameError.
length = 10      # timesteps per sequence
n_f = 10         # features per timestep
n_neurons = length
n_batch = length
n_epoch = 1000

# --- build the model with the custom cell ---
# BUG FIX: the original built a standalone functional layer `RNN(cell)` and
# then added the SAME cell object to the Sequential model.  Reusing one cell
# instance in two layers shares (and pre-builds) its weights, which is a
# likely cause of the "An operation has None for gradient" error.  Use a
# fresh cell that belongs to this model only.
cell = MinimalRNNCell(32)
model = Sequential()
model.add(RNN(cell, input_shape=(length, n_f), return_sequences=True))
model.add(TimeDistributed(Dense(n_neurons)))
model.add(Activation('relu'))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
print(model.summary())

# --- train ---
# Stop if val_loss has not improved for half of the epoch budget.
ES = EarlyStopping(monitor='val_loss', mode='min', verbose=1,
                   patience=int(n_epoch / 2))
# NOTE(review): X_train / y_train / X_Val / y_Val must be defined elsewhere
# in the file — they are not created here.
history = model.fit(X_train, y_train,
                    validation_data=(X_Val, y_Val),
                    epochs=n_epoch, batch_size=n_batch,
                    verbose=2, callbacks=[ES])