binoua committed on
Commit
8675aad
1 Parent(s): 703811e

chore: restore json

Files changed (2):
  1. handler.py +14 -3
  2. play_with_endpoint.py +25 -6
handler.py CHANGED
@@ -3,6 +3,17 @@ import numpy as np
 from concrete.ml.deployment import FHEModelServer
 
 
+def from_json(python_object):
+    if "__class__" in python_object:
+        return bytes(python_object["__value__"])
+
+
+def to_json(python_object):
+    if isinstance(python_object, bytes):
+        return {"__class__": "bytes", "__value__": list(python_object)}
+    raise TypeError(repr(python_object) + " is not JSON serializable")
+
+
 class EndpointHandler:
     def __init__(self, path=""):
 
@@ -19,12 +30,12 @@ class EndpointHandler:
         """
 
         # Get inputs
-        encrypted_inputs = data.pop("encrypted_inputs", data)
+        encrypted_inputs = from_json(data.pop("encrypted_inputs", data))
 
         # Get keys
-        evaluation_keys = data.pop("evaluation_keys", data)
+        evaluation_keys = from_json(data.pop("evaluation_keys", data))
 
         # Run CML prediction
         encrypted_prediction = self.fhemodel_server.run(encrypted_inputs, evaluation_keys)
 
-        return encrypted_prediction
+        return to_json(encrypted_prediction)
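For context, the from_json / to_json helpers added above encode raw bytes as a JSON-friendly {"__class__": "bytes", "__value__": [...]} object and rebuild the bytes on the other side. A minimal round-trip sketch, separate from the commit and reusing the same two definitions, might look like this:

    import json

    def to_json(python_object):
        # Same definition as in the diff above: bytes -> list of ints
        if isinstance(python_object, bytes):
            return {"__class__": "bytes", "__value__": list(python_object)}
        raise TypeError(repr(python_object) + " is not JSON serializable")

    def from_json(python_object):
        # Same definition as in the diff above: list of ints -> bytes
        if "__class__" in python_object:
            return bytes(python_object["__value__"])

    blob = b"\x00\x01\x02-fake-ciphertext"  # placeholder, not a real Concrete ML ciphertext
    wire = json.dumps({"encrypted_inputs": to_json(blob)})  # what the client would send
    data = json.loads(wire)  # what the handler receives as `data`
    assert from_json(data["encrypted_inputs"]) == blob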
play_with_endpoint.py CHANGED
@@ -12,15 +12,26 @@ from concrete.ml.deployment import FHEModelClient
 import requests
 
 
+def to_json(python_object):
+    if isinstance(python_object, bytes):
+        return {"__class__": "bytes", "__value__": list(python_object)}
+    raise TypeError(repr(python_object) + " is not JSON serializable")
+
+
+def from_json(python_object):
+    if "__class__" in python_object:
+        return bytes(python_object["__value__"])
+
+
 API_URL = "https://puqif7goarh132kl.us-east-1.aws.endpoints.huggingface.cloud"
 headers = {
     "Authorization": "Bearer " + os.environ.get("HF_TOKEN"),
-    "Content-Type": "application/octet-stream",
+    "Content-Type": "application/json",
 }
 
 
 def query(payload):
-    response = requests.post(API_URL, headers=headers, data=payload)
+    response = requests.post(API_URL, headers=headers, json=payload)
     return response.json()
 
 
@@ -47,26 +58,34 @@ for i in range(nb_samples):
     # Quantize the input and encrypt it
     encrypted_inputs = fhemodel_client.quantize_encrypt_serialize([X_test[i]])
 
+    if verbose:
+        print(f"Size of encrypted input: {sys.getsizeof(encrypted_inputs) / 1024 / 1024} megabytes")
+        print(f"Size of keys: {sys.getsizeof(evaluation_keys) / 1024 / 1024} megabytes")
+
     # Prepare the payload, including the evaluation keys which are needed server side
     payload = {
         "inputs": "fake",
-        "encrypted_inputs": encrypted_inputs,
-        "evaluation_keys": evaluation_keys,
+        "encrypted_inputs": to_json(encrypted_inputs),
+        "evaluation_keys": to_json(evaluation_keys),
     }
 
     # Run the inference on HF servers
     duration -= time.time()
+    duration_inference = -time.time()
     encrypted_prediction = query(payload)
     duration += time.time()
+    duration_inference += time.time()
 
-    encrypted_prediction = encrypted_prediction
+    encrypted_prediction = from_json(encrypted_prediction)
 
     # Decrypt the result and dequantize
     prediction_proba = fhemodel_client.deserialize_decrypt_dequantize(encrypted_prediction)[0]
    prediction = np.argmax(prediction_proba)
 
     if verbose or True:
-        print(f"for {i}-th input, {prediction=} with expected {y_test[i]}")
+        print(
+            f"for {i}-th input, {prediction=} with expected {y_test[i]} in {duration_inference} seconds"
+        )
 
     # Measure accuracy
     nb_good += y_test[i] == prediction
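With the Content-Type switched to application/json and requests.post called with json=payload, requests serializes the whole dict to a JSON body, which is why the byte fields have to be wrapped by to_json first. A rough sketch of what travels over the wire, using placeholder bytes instead of real serialized ciphertexts and keys:

    import json

    # Placeholder bytes standing in for the serialized ciphertext and evaluation keys
    fake_inputs = b"\x01\x02\x03"
    fake_keys = b"\x04\x05\x06"

    payload = {
        "inputs": "fake",
        "encrypted_inputs": {"__class__": "bytes", "__value__": list(fake_inputs)},
        "evaluation_keys": {"__class__": "bytes", "__value__": list(fake_keys)},
    }

    # requests.post(API_URL, headers=headers, json=payload) would send this body
    # with Content-Type: application/json on the request.
    print(json.dumps(payload))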