import requests
import numpy as np
import json
from sklearn.preprocessing import StandardScaler
from tensorflow import keras
import matplotlib.pyplot as plt



# Base address of the TensorFlow Serving REST endpoint.
root_url = "http://yunserver:8501"
# Model-metadata endpoint — useful for inspecting the model's input/output
# signatures with a plain GET before sending prediction requests.
url = f"{root_url}/v1/models/mnist/metadata"


# Load the sample digit image from a local path and show its shape/contents.
# NOTE(review): presumably a grayscale strip of digit crops sized in
# multiples of 28x28 — confirm against the source image.
image_path = r"E:\testDir\ocr\idcard\20200715093621618_nums_1.jpg"
img5 = plt.imread(image_path)
print("img5:", img5.shape)
print(img5)

# Rebuild a StandardScaler "pre-fitted" on fixed pixel statistics instead of
# calling fit(): transform() will standardise each pixel as (x - mean_) / scale_.
# 47_040_000 = 60_000 * 784 — presumably the MNIST training-set pixel count;
# TODO confirm these statistics match the model's training pipeline.
scaler = StandardScaler()
scaler.n_features_in_ = 1            # one feature: a single pixel intensity
scaler.n_samples_seen_ = 47040000
scaler.mean_ = np.array([33.31842145])
scaler.var_ = np.array([6172.85049713])
scaler.scale_ = np.array([78.56749008])   # sqrt(var_); divisor used by transform()

# Standardise pixel intensities with the pre-fitted scaler (one pixel per row),
# then restore the (batch, 28, 28, 1) layout the MNIST serving model expects.
pixels = img5.astype(np.float32).reshape(-1, 1)
x_test_scaled = scaler.transform(pixels).reshape(-1, 28, 28, 1)
print(x_test_scaled)
#
# Send the first 18 standardised 28x28 crops to TF Serving's predict endpoint.
data = {
    "instances": x_test_scaled[0:18].tolist()
}
url = f"{root_url}/v1/models/mnist:predict"
# json= serialises the payload AND sets the application/json Content-Type
# header (the original raw data= body sent no Content-Type at all).
ret = requests.post(url, json=data)
if ret.status_code == 200:
    result = ret.json()
    # One row of class scores per instance; argmax over axis 1 gives the
    # predicted digit label for each crop.
    predict = np.argmax(np.array(result['predictions']), axis=1)
    print(predict)
else:
    print("error")
