在加载保存的模型时,我尝试: 请注意,example_proto是一个tf.example对象
predict_fn = tf.contrib.predictor.from_saved_model(saved_model_dir)
a = {'inputs': [{'examples': str(example_proto.SerializeToString())}]}
t1 = datetime.datetime.now()
print(predict_fn(a))
print((datetime.datetime.now() - t1).total_seconds())
当实际保存模型时,我尝试了
def serving_input_receiver_fn():
default_batch_size = 1
"""An input receiver that expects a serialized tf.Example."""
serialized_tf_example = tf.placeholder(dtype=tf.string,
shape=[default_batch_size],
name='input_example_tensor')
receiver_tensors = {'examples': serialized_tf_example}
# print(receiver_tensors.numpy())
serialized_tf_example = tf.Print(serialized_tf_example,
[serialized_tf_example],
"serialized_tf_example: ")
features = tf.parse_example(serialized_tf_example, feature_description)
return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
和
estimator.export_savedmodel(
make_checkpoint_folder("serving_dir1"),
serving_input_receiver_fn)
我收到关注错误
InternalError: Unable to get element as bytes.