I am getting this error, but I am not sure why.
Below is my code. How can I fix it?
from tensorflow.keras.layers import (Input, Embedding, Flatten, Multiply, Add, Dense,
                                     Concatenate, BatchNormalization, Dropout, LeakyReLU)
from tensorflow.keras.models import Model


def f_stocks_embed_module(cat_input_embed_dim, batch_size):
    '''
    Returns a Model that can be used as a layer within the broader Keras Model.
    '''
    X = Input(shape=(1,), batch_size=batch_size)
    cat_input = Input(shape=(1,), batch_size=batch_size)
    # Learn an additive and a multiplicative embedding for the categorical input.
    cat_input_add = Embedding(input_dim=cat_input_embed_dim, output_dim=1)(cat_input)
    cat_input_mult = Embedding(input_dim=cat_input_embed_dim, output_dim=1)(cat_input)
    cat_input_add = Flatten()(cat_input_add)
    cat_input_mult = Flatten()(cat_input_mult)
    # Scale and shift the numeric input, then add a skip connection back to X.
    x = Multiply()([X, cat_input_mult])
    x = Add()([x, cat_input_add])
    x = Dense(1)(x)
    x = Add()([X, x])
    model = Model(inputs=[X, cat_input], outputs=x)
    return model
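To show what the helper is meant to do, here is a minimal standalone check of the sub-model (dummy data, batch size, and category count chosen arbitrarily for illustration; this is not part of the failing script below):

import numpy as np

# Minimal sketch: 20 rows, 7 possible category ids (arbitrary example values).
embed_module = f_stocks_embed_module(cat_input_embed_dim=7, batch_size=20)
numeric = np.random.rand(20, 1).astype('float32')     # continuous input
category = np.random.randint(0, 7, size=(20, 1))      # integer category ids
out = embed_module([numeric, category])                # tensor of shape (20, 1)

Called on its own like this, the sub-model works. The error only appears in the full script that follows.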
color_embed_dim = 7
clarity_embed_dim = 8
batch_size = 20
dense1 = 2**7
dense2 = 2**8
dense3 = 2**9
dropout = 0.8
price_loss = 1
cut_loss = 1
activation= LeakyReLU()
threshold = 0.7
#====================================================================
# INPUTS
#====================================================================
#----------------------------------------------------------------
carat = Input(
    shape=(1,),
    batch_size=batch_size,
    name='carat'
)
#----------------------------------------------------------------
Color = Input(
    shape=(1,),
    batch_size=batch_size,
    name='color'
)
#----------------------------------------------------------------
Clarity = Input(
    shape=(1,),
    batch_size=batch_size,
    name='clarity'
)
#----------------------------------------------------------------
depth = Input(
    shape=(1,),
    batch_size=batch_size,
    name='depth'
)
#----------------------------------------------------------------
table = Input(
    shape=(1,),
    batch_size=batch_size,
    name='table'
)
#----------------------------------------------------------------
X = Input(
    shape=(1,),
    batch_size=batch_size,
    name='x'
)
#----------------------------------------------------------------
y = Input(
    shape=(1,),
    batch_size=batch_size,
    name='y'
)
#----------------------------------------------------------------
z = Input(
    shape=(1,),
    batch_size=batch_size,
    name='z'
)
#----------------------------------------------------------------
#====================================================================
# CONCATENATE FEATURES
#====================================================================
Y = Concatenate()([carat, depth, table, X, y, z])
#====================================================================
# DENSE NETWORK FOR BOTH PRICE AND CUT
#====================================================================
Y = Dense(dense1, activation = activation)(Y)
Y = BatchNormalization()(Y)
Y = Dense(dense2, activation = activation)(Y)
Y = BatchNormalization()(Y)
#====================================================================
# DENSE NETWORK TO PREDICT CUT
#====================================================================
x = Dense(dense3, activation = activation)(Y)
x = BatchNormalization()(x)
x = Dropout(dropout)(x)
#====================================================================
# PREDICTING CUT USING THE EMBEDDINGS AND SKIP CONNECTIONS
#====================================================================
x = Dense(1)(x)
#-------------------------------------------------------------
# THE EFFECT OF COLOR ON CUT
#-------------------------------------------------------------
model_embed_color_cut = f_stocks_embed_module(color_embed_dim, batch_size)
model_embed_clarity_cut = f_stocks_embed_module(clarity_embed_dim, batch_size)
x = model_embed_color_cut([x, Color])
# The problem appears at this point. My actual code is longer, but for simplicity I cut it here and create a Model.
model = Model([carat, depth, table, X, y, z], x)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-182-acd9c88f235b> in <module>
149
150
--> 151 model = Model([carat, depth, table, X, y, z], x)
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\training.py in __new__(cls, *args, **kwargs)
240 # Functional model
241 from tensorflow.python.keras.engine import functional # pylint: disable=g-import-not-at-top
--> 242 return functional.Functional(*args, **kwargs)
243 else:
244 return super(Model, cls).__new__(cls, *args, **kwargs)
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\training\tracking\base.py in _method_wrapper(self, *args, **kwargs)
455 self._self_setattr_tracking = False # pylint: disable=protected-access
456 try:
--> 457 result = method(self, *args, **kwargs)
458 finally:
459 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\functional.py in __init__(self, inputs, outputs, name, trainable)
113 # 'arguments during initialization. Got an unexpected argument:')
114 super(Functional, self).__init__(name=name, trainable=trainable)
--> 115 self._init_graph_network(inputs, outputs)
116
117 @trackable.no_automatic_dependency_tracking
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\training\tracking\base.py in _method_wrapper(self, *args, **kwargs)
455 self._self_setattr_tracking = False # pylint: disable=protected-access
456 try:
--> 457 result = method(self, *args, **kwargs)
458 finally:
459 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\functional.py in _init_graph_network(self, inputs, outputs)
189 # Keep track of the network's nodes and layers.
190 nodes, nodes_by_depth, layers, _ = _map_graph_network(
--> 191 self.inputs, self.outputs)
192 self._network_nodes = nodes
193 self._nodes_by_depth = nodes_by_depth
~\AppData\Roaming\Python\Python37\site-packages\tensorflow\python\keras\engine\functional.py in _map_graph_network(inputs, outputs)
929 'The following previous layers '
930 'were accessed without issue: ' +
--> 931 str(layers_with_complete_input))
932 for x in nest.flatten(node.outputs):
933 computable_tensors.add(id(x))
ValueError: Graph disconnected: cannot obtain value for tensor Tensor("color_9:0", shape=(20, 1), dtype=float32) at layer "functional_29". The following previous layers were accessed without issue: ['concatenate_9', 'dense_34', 'batch_normalization_16', 'dense_35', 'batch_normalization_17', 'dense_36', 'batch_normalization_18', 'dropout_6', 'dense_37']