class LinearRegression:
def __init__(self,epoch=10,optimizer = tf.optimizers.Adam(0.001),verbose=0):
self.epochs = epoch
self.verbbose= verbose
self.optimizer = optimizer
self.W = tf.Variable(np.random.randn(),name = 'Weights')
self.b = tf.Variable(np.random.randn(),name = 'Intercept')
def linear_regression(self,x):
return (self.W * x) + self.b
def loss(self,actual,predicted):
return tf.reduce_sum(tf.pow(actual - predicted,2)) / (actual.shape[0])
def optimize(self,X,Y):
with tf.GradientTape() as g:
ypred = self.linear_regression(X)
loss = self.loss(Y, ypred)
grad = g.gradient(loss,[self.W,self.b])
self.optimizer.apply_gradients(zip(grad,[self.W,self.b]))
def fit(self,X,Y):
for i in range(self.epochs):
self.optimize(X,Y)
pred = self.linear_regression(X)
loss = self.loss(Y,pred)
if i % self.verbose == 0:
print('Epoch :',' '*(space-len(str(i))), ' {}'.format(i),end=" ")
print('| Loss : {:.4f} '.format(loss))
def predict(self,x):
return linear_regression(x)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-36-ae02c60d14d2> in <module>
----> 1 linear.fit(xtrain,ytrain)
<ipython-input-34-407bc32b4133> in fit(self, X, Y)
27 def fit(self,X,Y):
28 for i in range(self.epochs):
---> 29 self.optimize(X,Y)
30 pred = self.linear_regression(X)
31 loss = self.loss(Y,pred)
<ipython-input-34-407bc32b4133> in optimize(self, X, Y)
22 loss = self.loss(Y, ypred)
23 grad = g.gradient(loss,[self.W,self.b])
---> 24 self.optimizer.apply_gradients(zip(grad,[self.W,self.b]))
25
26
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/keras/optimizer_v2/optimizer_v2.py in apply_gradients(self, grads_and_vars, name)
425 ValueError: If none of the variables have gradients.
426 """
--> 427 grads_and_vars = _filter_grads(grads_and_vars)
428 var_list = [v for (_, v) in grads_and_vars]
429
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/keras/optimizer_v2/optimizer_v2.py in _filter_grads(grads_and_vars)
1023 if not filtered:
1024 raise ValueError("No gradients provided for any variable: %s." %
-> 1025 ([v.name for _, v in grads_and_vars],))
1026 if vars_with_empty_grads:
1027 logging.warning(
ValueError: No gradients provided for any variable: ['Weights:0', 'Intercept:0'].
为什么会出现此错误?即使我正在更新权重。我什至尝试在优化器功能之上添加装饰功能(@ tf.function),如下所示。但是,我收到了NotImplementedError错误。
@tf.function
def optimize(self,X,Y):
with tf.GradientTape() as g:
ypred = self.linear_regression(X)
loss = self.loss(Y, ypred)
grad = g.gradient(loss,[self.W,self.b])
self.optimizer.apply_gradients(zip(grad,[self.W,self.b]))
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-39-ae02c60d14d2> in <module>
----> 1 linear.fit(xtrain,ytrain)
<ipython-input-37-41fd823183c7> in fit(self, X, Y)
27 def fit(self,X,Y):
28 for i in range(self.epochs):
---> 29 self.optimize(X,Y)
30 pred = self.linear_regression(X)
31 loss = self.loss(Y,pred)
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/eager/def_function.py in __call__(self, *args, **kwds)
455
456 tracing_count = self._get_tracing_count()
--> 457 result = self._call(*args, **kwds)
458 if tracing_count == self._get_tracing_count():
459 self._call_counter.called_without_tracing()
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/eager/def_function.py in _call(self, *args, **kwds)
501 # This is the first call of __call__, so we have to initialize.
502 initializer_map = object_identity.ObjectIdentityDictionary()
--> 503 self._initialize(args, kwds, add_initializers_to=initializer_map)
504 finally:
505 # At this point we know that the initialization is complete (or less
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/eager/function.py in bound_method_wrapper(*args, **kwargs)
2656 # However, the replacer is still responsible for attaching self properly.
2657 # TODO(mdan): Is it possible to do it here instead?
-> 2658 return wrapped_fn(*args, **kwargs)
2659 weak_bound_method_wrapper = weakref.ref(bound_method_wrapper)
2660
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/func_graph.py in wrapper(*args, **kwargs)
903 except Exception as e: # pylint:disable=broad-except
904 if hasattr(e, "ag_error_metadata"):
--> 905 raise e.ag_error_metadata.to_exception(e)
906 else:
907 raise
NotImplementedError: in converted code:
<ipython-input-37-41fd823183c7>:22 optimize *
loss = self.loss(Y, ypred)
<ipython-input-37-41fd823183c7>:15 loss *
return tf.reduce_sum(tf.pow(actual - predicted,2)) / (actual.shape[0])
/usr/local/lib/python3.6/dist-packages/pandas/core/ops/__init__.py:1050 wrapper
left, result, index=left.index, name=res_name, dtype=None
/usr/local/lib/python3.6/dist-packages/pandas/core/ops/__init__.py:919 _construct_result
out = left._constructor(result, index=index, dtype=dtype)
/usr/local/lib/python3.6/dist-packages/pandas/core/series.py:314 __init__
data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
/usr/local/lib/python3.6/dist-packages/pandas/core/internals/construction.py:696 sanitize_array
subarr = _try_cast(data, dtype, copy, raise_cast_failure)
/usr/local/lib/python3.6/dist-packages/pandas/core/internals/construction.py:784 _try_cast
subarr = maybe_cast_to_datetime(arr, dtype)
/usr/local/lib/python3.6/dist-packages/pandas/core/dtypes/cast.py:1114 maybe_cast_to_datetime
value = maybe_infer_to_datetimelike(value)
/usr/local/lib/python3.6/dist-packages/pandas/core/dtypes/cast.py:911 maybe_infer_to_datetimelike
v = np.array(v, copy=False)
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/ops.py:736 __array__
" array.".format(self.name))
NotImplementedError: Cannot convert a symbolic Tensor (sub:0) to a numpy array.
请帮帮我,我正在尝试使用tensorflow2.0进行线性回归,我遇到了一个错误,指出没有提供渐变