I have seen some questions with similar problems, but I haven't found anything wrong with my code yet. I'm trying to set up and train a basic Keras model. I have the following:
from keras import layers
from keras import models
import pandas as pd
model = models.Sequential()
model.add(layers.Dense(2, activation='relu', input_shape=(2,)))
model.add(layers.Dense(1, activation='relu'))
And some data that looks like this:
housing_final
value floorArea bedrooms
0 458000 990 3
1 775000 1022 3
2 875000 1259 3
3 280000 699 3
4 410000 1184 3
... ... ... ...
24559 500000 1119 3
24560 560000 1593 4
24561 400000 1388 3
24562 530000 990 3
24563 350000 990 3
24564 rows × 3 columns
Then I do this:
val_dataframe = housing_final.sample(frac=0.2, random_state=1337)
train_dataframe = housing_final.drop(val_dataframe.index)
print(
"Using %d samples for training and %d for validation"
% (len(train_dataframe), len(val_dataframe))
)
import tensorflow as tf
def dataframe_to_dataset(dataframe):
    dataframe = dataframe.copy()
    labels = dataframe.pop("value")
    ds = tf.data.Dataset.from_tensor_slices((dict(dataframe), labels))
    ds = ds.shuffle(buffer_size=len(dataframe))
    return ds
train_ds = dataframe_to_dataset(train_dataframe)
val_ds = dataframe_to_dataset(val_dataframe)
train_ds = train_ds.batch(32)
val_ds = val_ds.batch(32)
model.compile("adam", "binary_crossentropy", metrics=["accuracy"])
model.fit(train_ds, epochs=50, validation_data=val_ds)
But I get the error in the title. I'm really not sure what I'm doing wrong here. I suspect there is some dimension mismatch with the input data, but it isn't clear. I'll paste the full stack trace below:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-47-43091e144d61> in <module>
----> 1 model.fit(train_ds, epochs=50, validation_data=val_ds)
~/.local/lib/python3.9/site-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1156 _r=1):
1157 callbacks.on_train_batch_begin(step)
-> 1158 tmp_logs = self.train_function(iterator)
1159 if data_handler.should_sync:
1160 context.async_wait()
~/.local/lib/python3.9/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
887
888 with OptionalXlaContext(self._jit_compile):
--> 889 result = self._call(*args, **kwds)
890
891 new_tracing_count = self.experimental_get_tracing_count()
~/.local/lib/python3.9/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
931 # This is the first call of __call__, so we have to initialize.
932 initializers = []
--> 933 self._initialize(args, kwds, add_initializers_to=initializers)
934 finally:
935 # At this point we know that the initialization is complete (or less
~/.local/lib/python3.9/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
761 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
762 self._concrete_stateful_fn = (
--> 763 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
764 *args, **kwds))
765
~/.local/lib/python3.9/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
3048 args, kwargs = None, None
3049 with self._lock:
-> 3050 graph_function, _ = self._maybe_define_function(args, kwargs)
3051 return graph_function
3052
~/.local/lib/python3.9/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3442
3443 self._function_cache.missed.add(call_context_key)
-> 3444 graph_function = self._create_graph_function(args, kwargs)
3445 self._function_cache.primary[cache_key] = graph_function
3446
~/.local/lib/python3.9/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3277 arg_names = base_arg_names + missing_arg_names
3278 graph_function = ConcreteFunction(
-> 3279 func_graph_module.func_graph_from_py_func(
3280 self._name,
3281 self._python_function,
~/.local/lib/python3.9/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
997 _, original_func = tf_decorator.unwrap(python_func)
998
--> 999 func_outputs = python_func(*func_args, **func_kwargs)
1000
1001 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/.local/lib/python3.9/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
670 # the function a weak reference to itself to avoid a reference cycle.
671 with OptionalXlaContext(compile_with_xla):
--> 672 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
673 return out
674
~/.local/lib/python3.9/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
984 except Exception as e: # pylint:disable=broad-except
985 if hasattr(e, "ag_error_metadata"):
--> 986 raise e.ag_error_metadata.to_exception(e)
987 else:
988 raise
ValueError: in user code:
/Users/logankilpatrick/.local/lib/python3.9/site-packages/keras/engine/training.py:830 train_function *
return step_function(self, iterator)
/Users/logankilpatrick/.local/lib/python3.9/site-packages/keras/engine/training.py:813 run_step *
outputs = model.train_step(data)
/Users/logankilpatrick/.local/lib/python3.9/site-packages/keras/engine/training.py:770 train_step *
y_pred = self(x, training=True)
/Users/logankilpatrick/.local/lib/python3.9/site-packages/keras/engine/base_layer.py:989 __call__ *
input_spec.assert_input_compatibility(self.input_spec, inputs, self.name)
/Users/logankilpatrick/.local/lib/python3.9/site-packages/keras/engine/input_spec.py:197 assert_input_compatibility *
raise ValueError('Layer ' + layer_name + ' expects ' +
ValueError: Layer sequential_1 expects 1 input(s), but it received 2 input tensors. Inputs received: [<tf.Tensor 'ExpandDims:0' shape=(None, 1) dtype=int64>, <tf.Tensor 'ExpandDims_1:0' shape=(None, 1) dtype=float64>]
Answer (score: 1)
You need to concatenate the floorArea and bedrooms features so that each batch has the final shape (batch_size, 2). Your current implementation gives you two separate tensors, of shape (batch_size, 1) and (batch_size, 1).
import numpy as np

def dataframe_to_dataset(dataframe):
    # Labels: the house prices in the "value" column
    labels = dataframe['value'].values
    # Stack the two feature columns side by side -> shape (num_rows, 2)
    features = np.stack((dataframe['bedrooms'].values, dataframe['floorArea'].values), axis=1)
    ds = tf.data.Dataset.from_tensor_slices((features, labels))
    ds = ds.shuffle(buffer_size=len(dataframe))
    return ds
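With this version of dataframe_to_dataset, each dataset element is a single feature tensor of shape (2,), which matches the model's input_shape=(2,). A minimal sketch of how you might batch and fit with it, reusing train_dataframe, val_dataframe, and model from the question (compile settings unchanged):
train_ds = dataframe_to_dataset(train_dataframe).batch(32)
val_ds = dataframe_to_dataset(val_dataframe).batch(32)
# Each batch now yields features of shape (None, 2) and labels of shape (None,)
print(train_ds.element_spec)
model.fit(train_ds, epochs=50, validation_data=val_ds)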