I am using this Colab for a BERT model.
In the last cell, in order to make predictions, we have the following:
def getPrediction(in_sentences):
  labels = ["Negative", "Positive"]
  # guid="" and label=0 are just placeholders; the label is ignored at prediction time
  input_examples = [run_classifier.InputExample(guid="", text_a=x, text_b=None, label=0) for x in in_sentences]
  input_features = run_classifier.convert_examples_to_features(input_examples, label_list, MAX_SEQ_LENGTH, tokenizer)
  predict_input_fn = run_classifier.input_fn_builder(features=input_features, seq_length=MAX_SEQ_LENGTH, is_training=False, drop_remainder=False)
  predictions = estimator.predict(predict_input_fn)
  return [(sentence, prediction['probabilities'], labels[prediction['labels']]) for sentence, prediction in zip(in_sentences, predictions)]
pred_sentences = [
  "That movie was absolutely awful",
  "The acting was a bit lacking",
  "The film was creative and surprising",
  "Absolutely fantastic!"
]
predictions = getPrediction(pred_sentences)
I want to create a SavedModel to use with TF Serving. How do I create a SavedModel for this model?
Normally I would define the following:
def serving_input_fn():
  """Create serving input function to be able to serve predictions later
  using provided inputs
  :return:
  """
  feature_placeholders = {
      'sentence': tf.placeholder(tf.string, [None]),
  }
  return tf.estimator.export.ServingInputReceiver(feature_placeholders,
                                                  feature_placeholders)
latest_ckpt = tf.train.latest_checkpoint(OUTPUT_DIR)
last_eval = estimator.evaluate(input_fn=test_input_fn, steps=None, checkpoint_path=latest_ckpt)
# Export the model to GCS for serving.
exporter = tf.estimator.LatestExporter('exporter', serving_input_fn, exports_to_keep=None)
exporter.export(estimator, OUTPUT_DIR, latest_ckpt, last_eval, is_the_final_export=True)
I am not sure how to define my tf.estimator.export.ServingInputReceiver for this.
Answer 0 (score: 0):
If you look at the create_model function in the notebook, it takes several arguments. These are the features that will be passed to the model.
You need to update the serving_input_fn function to include them.
def serving_input_fn():
  feature_spec = {
      "input_ids": tf.FixedLenFeature([MAX_SEQ_LENGTH], tf.int64),
      "input_mask": tf.FixedLenFeature([MAX_SEQ_LENGTH], tf.int64),
      "segment_ids": tf.FixedLenFeature([MAX_SEQ_LENGTH], tf.int64),
      "label_ids": tf.FixedLenFeature([], tf.int64)
  }
  serialized_tf_example = tf.placeholder(dtype=tf.string,
                                         shape=[None],
                                         name='input_example_tensor')
  receiver_tensors = {'example': serialized_tf_example}
  features = tf.parse_example(serialized_tf_example, feature_spec)
  return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
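With that serving_input_fn in place, the export itself can be done directly from the trained estimator. The sketch below is a minimal example, not the notebook's own code: EXPORT_DIR is a hypothetical output path, and setting estimator._export_to_tpu = False is only relevant if the estimator was built as a TPUEstimator (as in the BERT Colab) and you want a SavedModel servable on CPU/GPU.

EXPORT_DIR = 'export/bert'  # hypothetical output directory (assumption)

# Only needed when `estimator` is a TPUEstimator; disables the TPU export path
# so the resulting SavedModel can be served on CPU/GPU.
estimator._export_to_tpu = False

# Writes a timestamped SavedModel directory under EXPORT_DIR, usable by TF Serving.
export_path = estimator.export_savedmodel(EXPORT_DIR, serving_input_fn)
print('SavedModel written to', export_path)

At serving time, each request must then supply serialized tf.train.Example protos under the 'example' key, containing the input_ids, input_mask, segment_ids and label_ids features produced by the same tokenizer / convert_examples_to_features pipeline used during training.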