I created a simple model with TF Estimator and saved it with the export_savedmodel function. I use the simple Iris dataset, which has 4 features.
import os
import tensorflow as tf

num_epoch = 50
num_train = 120
num_test = 30

# 1 Define the input function
def input_function(x, y, is_train):
    dict_x = {
        "thisisinput": x,
    }

    dataset = tf.data.Dataset.from_tensor_slices((
        dict_x, y
    ))

    if is_train:
        dataset = dataset.shuffle(num_train).repeat(num_epoch).batch(num_train)
    else:
        dataset = dataset.batch(num_test)

    return dataset

def my_serving_input_fn():
    input_data = {
        "thisisinput": tf.placeholder(tf.float32, [None, 4], name='inputtensors')
    }
    return tf.estimator.export.ServingInputReceiver(input_data, input_data)

def main(argv):
    tf.set_random_seed(1103)  # fix the seed so results are reproducible

    # 2 Define feature columns
    feature_columns = [
        tf.feature_column.numeric_column(key="thisisinput", shape=4),
    ]

    # 3 Define an estimator
    classifier = tf.estimator.DNNClassifier(
        feature_columns=feature_columns,
        hidden_units=[10],
        n_classes=3,
        optimizer=tf.train.GradientDescentOptimizer(0.001),
        activation_fn=tf.nn.relu,
        model_dir='modeliris2/'
    )

    # Train the model
    classifier.train(
        input_fn=lambda: input_function(xtrain, ytrain, True)
    )

    # Evaluate the model
    eval_result = classifier.evaluate(
        input_fn=lambda: input_function(xtest, ytest, False)
    )

    print('\nTest set accuracy: {accuracy:0.3f}\n'.format(**eval_result))

    print('\nSaving models...')
    classifier.export_savedmodel("modeliris2pb", my_serving_input_fn)

if __name__ == "__main__":
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    tf.app.run(main)
After running the program, it generates a folder containing saved_model.pb. I have seen many tutorials suggesting contrib.predictor to load saved_model.pb, but I could not get it to work. This is how I used contrib.predictor to load the model:
def main(a):
    with tf.Session() as sess:
        PB_PATH = "modeliris2pb/1536219836/"
        predict_fn = predictor.from_saved_model(PB_PATH)

if __name__ == "__main__":
    main()
But it produces an error:

ValueError: Got signature_def_key "serving_default". Available signatures are ['predict']. Original error: No SignatureDef with key 'serving_default' found in MetaGraphDef.
Is there another, better way to load *.pb files? Why does this error happen? I suspect it is because of the my_serving_input_fn() function, but I don't know why.
Answer 0 (score: 1)
I was facing the same issue. I tried to search the web, but there was no explanation for it, so I tried a different approach:
First, you need to define the feature spec in dict format, giving the length and type of each feature, like this:
feature_spec = {'x': tf.FixedLenFeature([4],tf.float32)}
Then you have to build a serving input function with a string placeholder for the serialized examples, parse it against feature_spec, and return a tf.estimator.export.ServingInputReceiver:
def serving_input_receiver_fn():
    serialized_tf_example = tf.placeholder(dtype=tf.string,
                                           shape=[None],
                                           name='input_tensors')
    receiver_tensors = {'inputs': serialized_tf_example}
    features = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
Then just save the model with export_savedmodel:
classifier.export_savedmodel(dir_path, serving_input_receiver_fn)
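As a side note, export_savedmodel creates a timestamped subdirectory under dir_path (for example 1536315752/) containing saved_model.pb and a variables/ folder, and it returns that path, so you can capture it instead of hard-coding the timestamp when you restore the model later. A minimal sketch:

# Sketch: keep the export path returned by export_savedmodel instead of
# hard-coding the timestamped folder name later.
export_dir = classifier.export_savedmodel(dir_path, serving_input_receiver_fn)
print(export_dir)  # e.g. b'./1536315752' (bytes in some TF 1.x versions)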
Full working example code:
import os
from six.moves.urllib.request import urlopen

import numpy as np
import tensorflow as tf

dir_path = os.path.dirname('.')

IRIS_TRAINING = os.path.join(dir_path, "iris_training.csv")
IRIS_TEST = os.path.join(dir_path, "iris_test.csv")

feature_spec = {'x': tf.FixedLenFeature([4], tf.float32)}

def serving_input_receiver_fn():
    serialized_tf_example = tf.placeholder(dtype=tf.string,
                                           shape=[None],
                                           name='input_tensors')
    receiver_tensors = {'inputs': serialized_tf_example}
    features = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)

def main():
    training_set = tf.contrib.learn.datasets.base.load_csv_with_header(
        filename=IRIS_TRAINING,
        target_dtype=np.int,
        features_dtype=np.float32)
    test_set = tf.contrib.learn.datasets.base.load_csv_with_header(
        filename=IRIS_TEST,
        target_dtype=np.int,
        features_dtype=np.float32)

    feature_columns = [tf.feature_column.numeric_column("x", shape=[4])]

    classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns,
                                            hidden_units=[10, 20, 10],
                                            n_classes=3,
                                            model_dir=dir_path)

    # Define the training inputs
    train_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": np.array(training_set.data)},
        y=np.array(training_set.target),
        num_epochs=None,
        shuffle=True)

    # Train model.
    classifier.train(input_fn=train_input_fn, steps=200)

    classifier.export_savedmodel(dir_path, serving_input_receiver_fn)

if __name__ == "__main__":
    main()
Now let's restore the model:
import tensorflow as tf
import os

dir_path = os.path.dirname('.')  # current directory
exported_path = os.path.join(dir_path, "1536315752")

def main():
    with tf.Session() as sess:
        # Load the exported SavedModel into the session.
        tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], exported_path)

        # Build a tf.train.Example holding the four iris features.
        model_input = tf.train.Example(features=tf.train.Features(feature={
            'x': tf.train.Feature(float_list=tf.train.FloatList(value=[6.4, 3.2, 4.5, 1.5]))
        }))

        predictor = tf.contrib.predictor.from_saved_model(exported_path)

        input_tensor = tf.get_default_graph().get_tensor_by_name("input_tensors:0")  # (not actually used below)

        # Serialize the example and feed it to the 'inputs' key defined in
        # serving_input_receiver_fn.
        model_input = model_input.SerializeToString()
        output_dict = predictor({"inputs": [model_input]})

        print(" prediction is ", output_dict['scores'])

if __name__ == "__main__":
    main()
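Alternatively, based on the error message in the question, the raw-tensor export only contains a 'predict' signature (there is no 'serving_default' alias), so you may be able to load that original export directly by asking the predictor for the 'predict' signature explicitly. This is only a sketch of that idea, not something I have run against that exact export:

import tensorflow as tf

# Sketch: load the question's raw-tensor export by naming the signature key.
predict_fn = tf.contrib.predictor.from_saved_model(
    "modeliris2pb/1536219836/", signature_def_key="predict")

# The input key should match the feature name used in my_serving_input_fn
# ("thisisinput"), and the values are plain float features, not serialized
# tf.train.Example protos.
print(predict_fn({"thisisinput": [[6.4, 3.2, 4.5, 1.5]]}))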
Here is an IPython notebook demo with the data and an explanation.