I downloaded a pandas DataFrame and took the column named 'Close' as my y-axis values. As x-axis values I have 'range' and 'time': 'range' is an array the same length as 'Close', and 'time' is the DataFrame's index column.
I fit the 'Close' values with a small MLP. If I use 'range' as the input, it works fine. However, using 'time' raises an error, and I don't know why. If you can help, please let me know.
------ Here is the code, as text ------
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import yfinance as yf
aapl = yf.download('AAPL', '2019-1-1','2019-12-27')
close=aapl['Close']
time=aapl.index
range=np.arange(len(time))
def quantile_loss(quantile,y_p,y):
    e=y_p-y
    return tf.keras.backend.mean(tf.keras.backend.maximum(quantile*e,(quantile-1)*e))
model=tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(50,activation='relu',input_dim=1))
model.add(tf.keras.layers.Dense(50,activation='relu'))
model.add(tf.keras.layers.Dense(50,activation='relu'))
model.add(tf.keras.layers.Dense(1,activation='linear'))
q=.5
model.compile(optimizer='adam',loss=lambda y_p, y:quantile_loss(q,y_p,y))
model.fit(range,close,epochs=20,verbose=0)
close_middle=model.predict(range)
plt.plot(range,close)
plt.plot(range,close_middle)
q=.5
model.compile(optimizer='adam',loss=lambda y_p, y:quantile_loss(q,y_p,y))
model.fit(time,close,epochs=20,verbose=0)
close_middle=model.predict(time)
plt.plot(time,close)
plt.plot(time,close_middle)
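For reference, checking the dtypes of the two x inputs in a separate scratch cell (this cell is not counted in the "second-to-last cell" below) shows that only 'time' is a datetime type:

print(range.dtype)  # int32 or int64, depending on platform
print(time.dtype)   # datetime64[ns] -- the same '<M8[ns]' dtype that appears in the error below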
I get the error in the second-to-last cell:
WARNING:tensorflow:Keras is training/fitting/evaluating on array-like data. Keras may not be optimized for this format, so if your input data format is supported by TensorFlow I/O (https://github.com/tensorflow/io) we recommend using that to load a Dataset instead.
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\eager\execute.py in make_type(v, arg_name)
192 try:
--> 193 v = dtypes.as_dtype(v).base_dtype
194 except TypeError:
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\framework\dtypes.py in as_dtype(type_value)
642 raise TypeError("Cannot convert value %r to a TensorFlow DType." %
--> 643 (type_value,))
TypeError: Cannot convert value dtype('<M8[ns]') to a TensorFlow DType.
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-38-34e04a1cd0af> in <module>
2 q=.5
3 model.compile(optimizer='adam',loss=lambda y_p, y:quantile_loss(q,y_p,y))
----> 4 model.fit(time,close,epochs=20,verbose=0)
5 close_middle=model.predict(time)
6
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
64 def _method_wrapper(self, *args, **kwargs):
65 if not self._in_multi_worker_mode(): # pylint: disable=protected-access
---> 66 return method(self, *args, **kwargs)
67
68 # Running inside `run_distribute_coordinator` already.
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
813 workers=workers,
814 use_multiprocessing=use_multiprocessing,
--> 815 model=self)
816
817 # Container that configures and calls `tf.keras.Callback`s.
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\keras\engine\data_adapter.py in __init__(self, x, y, sample_weight, batch_size, steps_per_epoch, initial_epoch, epochs, shuffle, class_weight, max_queue_size, workers, use_multiprocessing, model)
1110 use_multiprocessing=use_multiprocessing,
1111 distribution_strategy=ds_context.get_strategy(),
-> 1112 model=model)
1113
1114 strategy = ds_context.get_strategy()
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\keras\engine\data_adapter.py in __init__(self, *args, **kwargs)
474 "recommend using that to load a Dataset instead.")
475
--> 476 super(GenericArrayLikeDataAdapter, self).__init__(*args, **kwargs)
477
478 def slice_inputs(self, indices_dataset, inputs):
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\keras\engine\data_adapter.py in __init__(self, x, y, sample_weights, sample_weight_modes, batch_size, epochs, steps, shuffle, **kwargs)
362 indices_dataset = indices_dataset.flat_map(slice_batch_indices)
363
--> 364 dataset = self.slice_inputs(indices_dataset, inputs)
365
366 if shuffle == "batch":
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\keras\engine\data_adapter.py in slice_inputs(self, indices_dataset, inputs)
519
520 dataset = indices_dataset.map(
--> 521 grab_batch, num_parallel_calls=dataset_ops.AUTOTUNE)
522
523 return dataset
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in map(self, map_func, num_parallel_calls, deterministic)
1626 num_parallel_calls,
1627 deterministic,
-> 1628 preserve_cardinality=True)
1629
1630 def flat_map(self, map_func):
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in __init__(self, input_dataset, map_func, num_parallel_calls, deterministic, use_inter_op_parallelism, preserve_cardinality, use_legacy_function)
4018 self._transformation_name(),
4019 dataset=input_dataset,
-> 4020 use_legacy_function=use_legacy_function)
4021 if deterministic is None:
4022 self._deterministic = "default"
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in __init__(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)
3219 with tracking.resource_tracker_scope(resource_tracker):
3220 # TODO(b/141462134): Switch to using garbage collection.
-> 3221 self._function = wrapper_fn.get_concrete_function()
3222
3223 if add_to_graph:
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\eager\function.py in get_concrete_function(self, *args, **kwargs)
2530 """
2531 graph_function = self._get_concrete_function_garbage_collected(
-> 2532 *args, **kwargs)
2533 graph_function._garbage_collector.release() # pylint: disable=protected-access
2534 return graph_function
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_garbage_collected(self, *args, **kwargs)
2494 args, kwargs = None, None
2495 with self._lock:
-> 2496 graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
2497 if self.input_signature:
2498 args = self.input_signature
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
2775
2776 self._function_cache.missed.add(call_context_key)
-> 2777 graph_function = self._create_graph_function(args, kwargs)
2778 self._function_cache.primary[cache_key] = graph_function
2779 return graph_function, args, kwargs
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
2665 arg_names=arg_names,
2666 override_flat_arg_shapes=override_flat_arg_shapes,
-> 2667 capture_by_value=self._capture_by_value),
2668 self._function_attributes,
2669 # Tell the ConcreteFunction to clean up its graph once it goes out of
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
979 _, original_func = tf_decorator.unwrap(python_func)
980
--> 981 func_outputs = python_func(*func_args, **func_kwargs)
982
983 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in wrapper_fn(*args)
3212 attributes=defun_kwargs)
3213 def wrapper_fn(*args): # pylint: disable=missing-docstring
-> 3214 ret = _wrapper_helper(*args)
3215 ret = structure.to_tensor_list(self._output_structure, ret)
3216 return [ops.convert_to_tensor(t) for t in ret]
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in _wrapper_helper(*args)
3154 nested_args = (nested_args,)
3155
-> 3156 ret = autograph.tf_convert(func, ag_ctx)(*nested_args)
3157 # If `func` returns a list of tensors, `nest.flatten()` and
3158 # `ops.convert_to_tensor()` would conspire to attempt to stack
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\autograph\impl\api.py in wrapper(*args, **kwargs)
260 try:
261 with conversion_ctx:
--> 262 return converted_call(f, args, kwargs, options=options)
263 except Exception as e: # pylint:disable=broad-except
264 if hasattr(e, 'ag_error_metadata'):
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\autograph\impl\api.py in converted_call(f, args, kwargs, caller_fn_scope, options)
490
491 if not options.user_requested and conversion.is_whitelisted(f):
--> 492 return _call_unconverted(f, args, kwargs, options)
493
494 # internal_convert_user_code is for example turned off when issuing a dynamic
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\autograph\impl\api.py in _call_unconverted(f, args, kwargs, options, update_cache)
344
345 if kwargs is not None:
--> 346 return f(*args, **kwargs)
347 else:
348 return f(*args)
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\keras\engine\data_adapter.py in grab_batch(indices)
513 return [slice_array(inp) for inp in flat_inputs]
514
--> 515 flat_out = script_ops.eager_py_func(py_method, [indices], flat_dtypes)
516 for v, original_inp in zip(flat_out, flat_inputs):
517 v.set_shape(dynamic_shape_like(original_inp))
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\ops\script_ops.py in eager_py_func(func, inp, Tout, name)
453 with ops.device(context.context().host_address_space()):
454 return _internal_py_func(
--> 455 func=func, inp=inp, Tout=Tout, eager=True, name=name)
456
457 return _internal_py_func(func=func, inp=inp, Tout=Tout, eager=True, name=name)
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\ops\script_ops.py in _internal_py_func(func, inp, Tout, stateful, eager, is_grad_func, name)
339 is_async=context.is_async(),
340 Tout=Tout,
--> 341 name=name)
342 else:
343 if stateful:
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\ops\gen_script_ops.py in eager_py_func(input, token, Tout, is_async, name)
61 "Expected list for 'Tout' argument to "
62 "'eager_py_func' Op, not %r." % Tout)
---> 63 Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
64 if is_async is None:
65 is_async = False
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\ops\gen_script_ops.py in <listcomp>(.0)
61 "Expected list for 'Tout' argument to "
62 "'eager_py_func' Op, not %r." % Tout)
---> 63 Tout = [_execute.make_type(_t, "Tout") for _t in Tout]
64 if is_async is None:
65 is_async = False
c:\users\user\anaconda3\envs\dl\lib\site-packages\tensorflow\python\eager\execute.py in make_type(v, arg_name)
194 except TypeError:
195 raise TypeError("Expected DataType for argument '%s' not %s." %
--> 196 (arg_name, repr(v)))
197 i = v.as_datatype_enum
198 return i
TypeError: Expected DataType for argument 'Tout' not dtype('<M8[ns]').
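In case it helps narrow things down: if I first convert the index to plain numbers, roughly as in the sketch below (the day offsets and the float32 cast are just my guess at a workaround, not something I know to be the proper way), the fit runs without the dtype error, but I still don't understand why the DatetimeIndex itself cannot be used directly:

# guess at a workaround: turn the DatetimeIndex into day offsets (plain floats)
time_num = np.asarray((time - time[0]).days, dtype='float32')
model.fit(time_num, close, epochs=20, verbose=0)
close_middle = model.predict(time_num)
plt.plot(time, close)          # still plot against the original dates
plt.plot(time, close_middle)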