This problem has been bothering me for a week; can anyone spot what is wrong here? I have already ruled out potential errors in the other parts of the code. No matter what input I feed in, querying the SavedModel always returns the same result.
# x and y_ come from the labeled data; y is the output of the neural network
x = tf.placeholder(tf.float32, [None, 300], name='x-input')
y_ = tf.placeholder(tf.float32, [None, 7], name='y-input')
y = inference(x, regularizer, norm=True)
# some code is omitted here for simplicity
# save the model
export_path = './savedmodel'
if tf.gfile.Exists(export_path):
    tf.gfile.DeleteRecursively(export_path)
builder = tf.saved_model.builder.SavedModelBuilder(export_path)
tensor_info_x = tf.saved_model.utils.build_tensor_info(x)
tensor_info_y = tf.saved_model.utils.build_tensor_info(y)
prediction_signature = (
    tf.saved_model.signature_def_utils.build_signature_def(
        inputs={'xx': tensor_info_x},
        outputs={'yy': tensor_info_y},
        method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME))
builder.add_meta_graph_and_variables(
    sess, [tf.saved_model.tag_constants.SERVING],
    signature_def_map={
        tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
            prediction_signature
    })
builder.save()
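Note that add_meta_graph_and_variables snapshots whatever values the variables hold in sess at that moment. To confirm the export really captures the trained weights, a minimal debugging sketch (not part of the original code; it assumes NumPy is imported as np) is to fingerprint the variables right before saving:

import numpy as np

# Sum of each variable's values in the export session, taken just before
# builder.save(); this is only a debugging fingerprint, not part of the model.
var_values = sess.run(tf.global_variables())
var_sums_at_export = {v.name: float(np.sum(val))
                      for v, val in zip(tf.global_variables(), var_values)}
print(var_sums_at_export)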
# query the model
sess = tf.Session()
signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
input_key = 'xx'
output_key = 'yy'
export_path = './savedmodel'
meta_graph_def = tf.saved_model.loader.load(
    sess,
    [tf.saved_model.tag_constants.SERVING],
    export_path)
signature = meta_graph_def.signature_def
x_tensor_name = signature[signature_key].inputs[input_key].name
y_tensor_name = signature[signature_key].outputs[output_key].name
x = sess.graph.get_tensor_by_name(x_tensor_name)
y = sess.graph.get_tensor_by_name(y_tensor_name)
x1 = [[num1, num2, ..., num300]]  # a single 300-dimensional input vector
print(sess.run(y, {x: x1}))
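As a sanity check, a short sketch (under the same assumptions as above: NumPy available as np, and the fingerprint dictionary printed at export time) recomputes the variable sums after loading and feeds two clearly different inputs, which shows whether the restored weights are being used at all:

import numpy as np

# Recompute the same fingerprint after loading; it should match the one
# printed at export time if the variables were restored correctly.
var_values = sess.run(tf.global_variables())
var_sums_after_load = {v.name: float(np.sum(val))
                       for v, val in zip(tf.global_variables(), var_values)}
print(var_sums_after_load)

# Two clearly different inputs; with restored, trained weights the two
# outputs would normally differ.
a = np.zeros((1, 300), dtype=np.float32)
b = np.random.rand(1, 300).astype(np.float32)
print(sess.run(y, {x: a}))
print(sess.run(y, {x: b}))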