class Dense(layers.Layer):
    """A minimal fully-connected layer: output = inputs @ w + b.

    Weights are created lazily in build() once the input feature
    dimension is known.
    """

    def __init__(self, units):
        super().__init__()
        # Number of output features for this layer.
        self.units = units

    def build(self, input_shape):
        # Kernel: maps the last input dimension to `units` outputs.
        self.w = self.add_weight(
            name='w',
            shape=(input_shape[-1], self.units),
            initializer='random_normal',
            trainable=True,
        )
        # Bias: one scalar per output unit, initialized to zero.
        self.b = self.add_weight(
            name='b',
            shape=(self.units,),
            initializer='zeros',
            trainable=True,
        )

    def call(self, inputs):
        # Affine transform; no activation here (applied by the caller).
        return tf.matmul(inputs, self.w) + self.b
class MyRelu(layers.Layer):
    """Element-wise ReLU activation implemented as a stateless layer."""

    def __init__(self):
        super().__init__()

    def call(self, x):
        # max(x, 0) element-wise — the standard rectified linear unit.
        return tf.math.maximum(x, 0)
class MyModel(keras.Model):
    """Two-layer MLP built from the custom Dense and MyRelu layers.

    Architecture: Dense(64) -> ReLU -> Dense(num_classes).
    The final layer emits raw logits (no softmax); pair it with a
    loss configured with from_logits=True.
    """

    def __init__(self, num_classes=10):
        super().__init__()
        self.dense1 = Dense(64)
        self.dense2 = Dense(num_classes)
        self.relu = MyRelu()

    def call(self, input_tensor):
        hidden = self.dense1(input_tensor)
        activated = self.relu(hidden)
        logits = self.dense2(activated)
        return logits
# Build, train, and evaluate the model.
model = MyModel()

model.compile(
    # from_logits=True because the output layer has no softmax —
    # the loss applies it internally for numerical stability.
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    optimizer=keras.optimizers.Adam(),
    metrics=['accuracy'],
)

model.fit(X_train, y_train, batch_size=32, epochs=2, verbose=2)
# model.summary() is only meaningful after the model has been built
# (e.g. after fit); uncomment to inspect the architecture:
# print(model.summary())
model.evaluate(X_test, y_test, batch_size=32, verbose=2)
这是我得到的错误:
ValueError: No gradients provided for any variable: ['my_model/dense/w:0', 'my_model/dense/b:0', 'my_model/dense_1/w:0', 'my_model/dense_1/b:0'].
我似乎无法找到解决此错误的方法。我看过其他类似的错误,但没有找到答案。提前致谢!