File size: 2,587 Bytes
9a997e4
 
 
 
 
 
 
a241bb3
18ba8c1
a241bb3
 
 
18ba8c1
a241bb3
 
 
 
 
9a997e4
 
a241bb3
9a997e4
 
 
 
 
 
 
 
 
 
 
18ba8c1
9a997e4
 
 
 
18ba8c1
9a997e4
 
a241bb3
 
 
9a997e4
 
18ba8c1
9a997e4
 
18ba8c1
9a997e4
 
 
 
 
 
 
18ba8c1
9a997e4
18ba8c1
9a997e4
18ba8c1
9a997e4
 
 
 
 
 
 
 
 
 
ec21179
9a997e4
ec21179
9a997e4
 
18ba8c1
 
 
 
 
 
 
 
9a997e4
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
"""Train and compile the model."""

import shutil
import numpy
import pandas
import pickle

from settings import (
    DEPLOYMENT_PATH,
    DATA_PATH, 
    INPUT_SLICES, 
    PRE_PROCESSOR_USER_PATH, 
    PRE_PROCESSOR_BANK_PATH,
    PRE_PROCESSOR_THIRD_PARTY_PATH,
    USER_COLUMNS,
    BANK_COLUMNS,
    THIRD_PARTY_COLUMNS,
)
from utils.client_server_interface import MultiInputsFHEModelDev
from utils.model import MultiInputXGBClassifier
from utils.pre_processing import get_pre_processors


def get_processed_multi_inputs(data):
    """Split a pre-processed feature matrix into the three parties' inputs.

    Returns a 3-tuple of column slices of ``data`` — one per party
    (user, bank, third_party) — as defined by ``INPUT_SLICES``.
    """
    parties = ("user", "bank", "third_party")
    return tuple(data[:, INPUT_SLICES[party]] for party in parties)

print("Load and pre-process the data")

# Load the raw dataset (path and encoding configured in settings).
data = pandas.read_csv(DATA_PATH, encoding="utf-8")

# Define input and target data: pop() removes the "Target" label column
# from data_x, so the feature frame no longer contains it.
data_x = data.copy()
data_y = data_x.pop("Target").copy().to_frame()

# Get data from all parties: each party only sees its own column subset
# (column lists come from settings).
data_user = data_x[USER_COLUMNS].copy()
data_bank = data_x[BANK_COLUMNS].copy()
data_third_party = data_x[THIRD_PARTY_COLUMNS].copy()

# Feature engineer the data: one independent pre-processor per party,
# each fitted only on that party's columns.
pre_processor_user, pre_processor_bank, pre_processor_third_party = get_pre_processors()

preprocessed_data_user = pre_processor_user.fit_transform(data_user)
preprocessed_data_bank = pre_processor_bank.fit_transform(data_bank)
preprocessed_data_third_party = pre_processor_third_party.fit_transform(data_third_party)

# Re-assemble a single feature matrix. NOTE(review): the concatenation
# order (user, bank, third_party) presumably matches INPUT_SLICES so that
# get_processed_multi_inputs can slice it back apart — verify in settings.
preprocessed_data_x = numpy.concatenate((preprocessed_data_user, preprocessed_data_bank, preprocessed_data_third_party), axis=1)


print("\nTrain and compile the model")

model = MultiInputXGBClassifier(max_depth=3, n_estimators=20)

# NOTE(review): fit_benchmark presumably returns the FHE-ready model plus
# an equivalent plain scikit-learn model — sklearn_model is unused here.
model, sklearn_model = model.fit_benchmark(preprocessed_data_x, data_y)
 
# Split the training matrix back into per-party inputs for compilation.
multi_inputs_train = get_processed_multi_inputs(preprocessed_data_x)

# Compile to an FHE circuit with all three parties' inputs encrypted.
model.compile(*multi_inputs_train, inputs_encryption_status=["encrypted", "encrypted", "encrypted"])

# Delete the deployment folder and its content if it already exists, so
# stale artifacts from a previous run are never mixed with new ones.
if DEPLOYMENT_PATH.is_dir():
    shutil.rmtree(DEPLOYMENT_PATH)


print("\nSave deployment files")

# Save files needed for deployment (and enable cross-platform deployment)
fhe_dev = MultiInputsFHEModelDev(DEPLOYMENT_PATH, model)
fhe_dev.save(via_mlir=True)

# Save pre-processors (pickled) so the same fitted feature engineering
# can be re-applied to inputs at inference time.
with (
    PRE_PROCESSOR_USER_PATH.open('wb') as file_user, 
    PRE_PROCESSOR_BANK_PATH.open('wb') as file_bank,
    PRE_PROCESSOR_THIRD_PARTY_PATH.open('wb') as file_third_party,
):
    pickle.dump(pre_processor_user, file_user)
    pickle.dump(pre_processor_bank, file_bank)
    pickle.dump(pre_processor_third_party, file_third_party)

print("\nDone !")