# Flask inference server: accepts gzip-compressed msgpack spectral data,
# runs an ONNX model, and returns predicted melt properties.
# Standard library
import gzip

# Third-party
import msgpack
import numpy as np
import onnxruntime
from flask import Flask, request, jsonify, Response

app = Flask(__name__)

# Load the model once at startup so every request reuses the same session.
# CPU-only execution provider; the .onnx file must sit in the working directory.
ort_session = onnxruntime.InferenceSession(
    "super_resolution.onnx", providers=["CPUExecutionProvider"]
)
|
|
|
|
@app.route('/')
def hello_world():
    """Liveness endpoint: returns a fixed greeting so clients can check the server is up."""
    return 'hello world'
|
|
|
|
@app.route('/post', methods=['POST'])
def register():
    """Predict melt properties from an uploaded batch of spectral frames.

    Request body: gzip-compressed msgpack map containing key
    "spectral_data_bin" — raw uint16 samples forming N frames of shape
    (1, 224, 512) (little-endian buffer, 2 bytes per sample).

    Response body: gzip-compressed msgpack map with keys "Temperature",
    "C", "P" (model-derived) and "S", "Mn", "Ni", "Mo", "Cr"
    (fixed placeholder values, not model predictions).
    """
    payload = msgpack.unpackb(gzip.decompress(request.data))

    raw = payload["spectral_data_bin"]
    # 224 * 512 uint16 samples per frame, 2 bytes each; integer division
    # avoids the float-rounding risk of the chained `/` form.
    frame_bytes = 224 * 512 * 2
    n_frames = len(raw) // frame_bytes
    frames = np.frombuffer(raw, dtype=np.uint16).reshape(n_frames, 1, 224, 512)

    # Average the repeated acquisitions into one (1, 1, 224, 512) input.
    mean_frame = np.mean(frames, axis=0, keepdims=True)

    # Scale raw counts into [0, 1]; 4095 suggests a 12-bit ADC full scale
    # — TODO confirm against the acquisition hardware.
    ort_inputs = {
        ort_session.get_inputs()[0].name: mean_frame.astype(np.float32) / 4095
    }
    output = ort_session.run(None, ort_inputs)[0]

    # De-normalise model outputs back to physical ranges (min + x * span).
    result = {}
    result["Temperature"] = float(output[0, 0] * (1663 - 1496) + 1496)
    result["C"] = float(output[0, 1] * (0.829 - 0.079) + 0.079)
    # NOTE(review): "P" reuses output[0, 1] exactly like "C" — looks like a
    # copy-paste slip (output[0, 2]?). Kept as-is pending confirmation of the
    # model's output layout.
    result["P"] = float(output[0, 1] * (0.797 - 0.001) + 0.001)
    # Fixed placeholder values, not model predictions.
    result["S"] = 0.04402615384615385
    result["Mn"] = 0.138787
    result["Ni"] = 0.035104
    result["Mo"] = 0.093789
    result["Cr"] = 0.002983

    # Mirror the request encoding: msgpack, then gzip.
    body = gzip.compress(msgpack.packb(result))
    return Response(body, mimetype="application/octet-stream")
|
|
|
|
if __name__ == '__main__':
    # Development entry point: listen on every interface, port 22111.
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader —
    # keep it off in any deployed environment.
    app.run(host="0.0.0.0", port=22111, debug=True)