I tried the following code to reuse a saved BERT model.
def serving_input_receiver_fn():
    # Spec for parsing the serialized tf.Example protos received at serving time
    feature_spec = {
        "input_ids": tf.FixedLenFeature([MAX_SEQ_LENGTH], tf.int64),
        "input_mask": tf.FixedLenFeature([MAX_SEQ_LENGTH], tf.int64),
        "segment_ids": tf.FixedLenFeature([MAX_SEQ_LENGTH], tf.int64),
        "label_ids": tf.FixedLenFeature([], tf.int64)
    }
    # Placeholder that receives a batch of serialized tf.Example strings
    serialized_tf_example = tf.placeholder(dtype=tf.string,
                                           shape=[None],
                                           name='input_example_tensor')
    print(serialized_tf_example, "serialized_tf_example")
    print(serialized_tf_example.shape, "Shape")
    receiver_tensors = {'example': serialized_tf_example}
    print(receiver_tensors, "receiver_tensors")
    features = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)

export_path = './BERTmodel/Data/'
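For reference, the export itself was done roughly like this (a minimal sketch; `estimator` is the trained estimator built by run_classifier, and the exact call in my notebook may differ):

# Sketch only: `estimator` is assumed to be the trained (TPU)Estimator from run_classifier
estimator._export_to_tpu = False  # only needed if it is a TPUEstimator
estimator.export_savedmodel(export_path, serving_input_receiver_fn)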
When I run prediction with the code below, I get the following error:

Cannot feed value of shape () for Tensor 'input_example_tensor:0', which has shape '(?,)'

Can someone suggest what I am doing wrong?
pred_sentences = ["The site is great", "I think it's not good"]
def getPrediction(in_sentences):
labels = ["Negative", "Positive", "Neutral"]
input_examples = [run_classifier.InputExample(guid="", text_a = x, text_b = None, label = 0) for x in in_sentences]
input_features = run_classifier.convert_examples_to_features(input_examples, label_list, MAX_SEQ_LENGTH, tokenizer)
predict_input_fn = run_classifier.input_fn_builder(features=input_features, seq_length=MAX_SEQ_LENGTH, is_training=False, drop_remainder=False)
return predict_input_fn
from tensorflow.contrib import predictor
with tf.Session() as sess:
predict_fn = predictor.from_saved_model('model_path')
predictions = predict_fn({"example": getPrediction(pred_sentences)})
print(predictions)
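From the error message I suspect the 'example' receiver tensor expects a batch of serialized tf.train.Example strings rather than the input_fn that getPrediction currently returns. Is something along these lines the right direction? (A sketch only; it assumes run_classifier.InputFeatures exposes input_ids, input_mask, segment_ids and label_id.)

def serialize_features(input_features):
    # Sketch: pack each run_classifier.InputFeatures into a serialized tf.train.Example,
    # matching the feature_spec used in serving_input_receiver_fn above
    serialized = []
    for f in input_features:
        example = tf.train.Example(features=tf.train.Features(feature={
            "input_ids": tf.train.Feature(int64_list=tf.train.Int64List(value=f.input_ids)),
            "input_mask": tf.train.Feature(int64_list=tf.train.Int64List(value=f.input_mask)),
            "segment_ids": tf.train.Feature(int64_list=tf.train.Int64List(value=f.segment_ids)),
            "label_ids": tf.train.Feature(int64_list=tf.train.Int64List(value=[f.label_id])),
        }))
        serialized.append(example.SerializeToString())
    return serialized

# e.g. predictions = predict_fn({"example": serialize_features(input_features)})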