Hi, how is everyone? Please save me from this problem. So, I have deployed a model with the Azure ML service (:( the documentation is terrible). My project is around computer vision, and I want to test the web service, but it returns Unauthorized, no Authorization header.
And when I POST to the URL, it returns a 200 response (OK), but not which class the image belongs to nor the model's confidence.
I have built a REST API for this model and I will share the code. Here is the REST API; it runs on its own (locally), and when I use Postman I get back the prediction values and the accuracy score.
# import the necessary packages
from keras.models import load_model
import tensorflow as tf
from keras.preprocessing.image import img_to_array
from PIL import Image
from PIL import ImageFile
import numpy as np
from keras import backend as K
from flask import Flask
import flask
# Azure stuff I don't know but are needed.
from azureml.contrib.services.aml_request import AMLRequest, rawhttp
from azureml.contrib.services.aml_response import AMLResponse
# manipulate files
import io
import os
import json
import requests
# AzureML stuff to consider, checks for the registered models.
from azureml.core.model import Model

app = Flask(__name__)

def init():
    global model
    # Get the path where the deployed model can be found, then load it.
    model_path = Model.get_model_path(model_name='large_models_1')
    model = load_model(model_path)
    global graph
    graph = tf.get_default_graph()
    print("* Model Loaded *, this is the init() * ")

# prepares image for prediction
def prepare_image(image, target):
    # if the image mode is not RGB, convert it
    if image.mode != "RGB":
        image = image.convert("RGB")
    # resize the input image and preprocess it
    ImageFile.LOAD_TRUNCATED_IMAGES = True
    image = image.resize(target)
    image = img_to_array(image)/255.
    image = np.expand_dims(image, axis=0)
    image = np.vstack([image])
    return image

@app.route("/predict", methods=["POST"])
def run():
    print("This is run()")
    K.clear_session()  # making new predictions.
    # wanna see the error.
    try:
        if flask.request.method == 'POST':
            if flask.request.files.get("image"):
                image = flask.request.files["image"].read()
                image = Image.open(io.BytesIO(image))
                image_files = prepare_image(image, target=(160, 160))  # prepares the image to be fed to the model.
                with graph.as_default():
                    prediction = model.predict(image_files).tolist()
                    print(prediction)
                    response = {
                        "prediction": {
                            "anomaly": prediction[0][0],
                            "normal": prediction[0][1],
                        }  # JSON response
                    }  # end response
                    return json.dumps(response, indent=4)
            else:
                print("Error 500, bad request.")
    # Let's see the error.
    except Exception as e:
        result = str(e)
        return json.dumps(result, indent=4)
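For reference, the Postman call against this local Flask service can be reproduced with requests roughly like the snippet below; the port, path, and file name are placeholders for the local setup, and the "image" key has to match flask.request.files["image"] above.

import requests

# hypothetical local test of the /predict route; adjust the port to wherever Flask runs
url = "http://localhost:5000/predict"
with open("test.jpg", "rb") as f:
    # send the file as multipart form-data under the "image" field
    resp = requests.post(url, files={"image": f})
print(resp.status_code)
print(resp.text)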
Here is the problem: I deployed with AKS (Azure Kubernetes Service) and had to change score.py to handle binary data (images), following this link "{{3 }}":
from azureml.contrib.services.aml_request import AMLRequest, rawhttp
from azureml.contrib.services.aml_response import AMLResponse

def init():
    print("This is init()")

@rawhttp
def run(request):
    print("This is run()")
    print("Request: [{0}]".format(request))
    if request.method == 'GET':
        # For this example, just return the URL for GETs.
        respBody = str.encode(request.full_path)
        return AMLResponse(respBody, 200)
    elif request.method == 'POST':
        reqBody = request.get_data(False)
        # For a real-world solution, you would load the data from reqBody
        # and send it to the model. Then return the response.
        # For demonstration purposes, this example just returns the posted data as the response.
        return AMLResponse(reqBody, 200)
    else:
        return AMLResponse("bad request", 500)
My question is: how do I run a proper test when I POST an image to the URL? I have already tried this tutorial, but it does not seem to help and I don't understand it. Please help me! By the way, I have the feeling this comes down to score.py and the run() function.
Answer 0 (score: 0)
I started from the same tutorial, and the following is working for init() and run() (at least for me). Note that I don't use AKS but Azure Container Instances; I'm fairly sure that changes nothing (apart from the authorization header mentioned in the test part, see below).
%%writefile scorebinary.py
import json
import numpy as np
import os
from tensorflow.keras.models import load_model
import PIL
from io import BytesIO
from azureml.contrib.services.aml_request import AMLRequest, rawhttp
from azureml.contrib.services.aml_response import AMLResponse

def DataPrepImage(rawimage):
    Def = 200
    img = rawimage.resize((Def, Def), resample=PIL.Image.BILINEAR)
    img = (np.array(img)/255).reshape(-1, Def, Def, 1)
    return img

def init():
    global network
    # AZUREML_MODEL_DIR is an environment variable created during deployment.
    # It is the path to the model folder (./azureml-models/$MODEL_NAME/$VERSION)
    # For multiple models, it points to the folder containing all deployed models (./azureml-models)
    folder = os.getenv('AZUREML_MODEL_DIR')
    if folder is None:  # running outside docker (local test)
        folder = "."
    model_path = os.path.join(folder, 'Reseau_Siamois_3_36.h5')
    # load the Keras model
    network = load_model(model_path)

@rawhttp
def run(request):
    if request.method == 'POST':
        reqBody = request.get_data(False)
        myImage = PIL.Image.open(BytesIO(reqBody))
        myImage = myImage.convert('L')
        # prepare the image
        imgprepped = DataPrepImage(myImage)
        # make prediction
        embed = network.predict(imgprepped)
        return {'emb': embed.tolist(), 'imgpreped': imgprepped.tolist()}
    else:
        return AMLResponse("bad request, use POST", 500)
To test it, I use the code below. Now, for AKS, look at the comments (they are not mine, they come from the Azure tutorial); they should solve your 4xx error problem.
import requests
import numpy as np
import PIL
import json
import matplotlib.pyplot as plt
%matplotlib inline

img = open('cat.jpg', 'rb').read()
headers = {'Content-Type':'application/json'}

# for an AKS deployment you would need to put the service key in the header as well
# api_key = service.get_key()
# headers = {'Content-Type':'application/json', 'Authorization':('Bearer '+ api_key)}

resp = requests.post(service.scoring_uri, data=img, headers=headers)
#print("POST to url", service.scoring_uri)

responsedata = json.loads(resp.text)
emb = responsedata['emb']
print("prediction:", emb)

img = np.array(responsedata['imgpreped'])
plt.axis("off")
plt.imshow(img[0,:,:,0], vmin=0, vmax=1, cmap='Greys')
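For your AKS deployment specifically, the same call just needs the service key spelled out in the Authorization header, which is what removes "Unauthorized, no Authorization header". Spelled out, it would look roughly like this (the key lookup via get_keys() is an assumption on my side; the key is also shown in the portal):

import requests

# same raw-bytes POST as above, but with the AKS service key in the headers
api_key = service.get_keys()[0]   # primary key of the AKS webservice
headers = {'Content-Type': 'application/json',
           'Authorization': 'Bearer ' + api_key}

img = open('cat.jpg', 'rb').read()
resp = requests.post(service.scoring_uri, data=img, headers=headers)
print(resp.status_code, resp.text)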
Finally, don't forget to declare the necessary packages:
from azureml.core.conda_dependencies import CondaDependencies

myenv = CondaDependencies()
myenv.add_conda_package("tensorflow")
myenv.add_conda_package("pillow")
myenv.add_pip_package("azureml-contrib-services")

with open("myenv.yml", "w") as f:
    f.write(myenv.serialize_to_string())
with open("myenv.yml", "r") as f:
    print(f.read())
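If it helps, this is roughly how myenv.yml and scorebinary.py can be wired into an ACI deployment with the v1 SDK. It is only a sketch: the entry script and the registered model name are taken from the snippets above, the service name and resource sizes are assumptions.

from azureml.core import Workspace, Environment
from azureml.core.model import Model, InferenceConfig
from azureml.core.webservice import AciWebservice

ws = Workspace.from_config()

# build an Environment from the myenv.yml written above
env = Environment.from_conda_specification(name="myenv", file_path="myenv.yml")
inference_config = InferenceConfig(entry_script="scorebinary.py", environment=env)
deployment_config = AciWebservice.deploy_configuration(cpu_cores=1, memory_gb=2)

service = Model.deploy(workspace=ws,
                       name="image-scoring",
                       models=[Model(ws, name="large_models_1")],  # your registered model name
                       inference_config=inference_config,
                       deployment_config=deployment_config)
service.wait_for_deployment(show_output=True)
print(service.scoring_uri)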