andrewssobral committed
Commit b7e9713
1 Parent(s): 48946cf

Added gradient boosting regressor example

scikit-learn/gradient_boosting_regressor/client.py ADDED
@@ -0,0 +1,63 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+
+ import sys
+ import argparse
+ import numpy as np
+
+ import tritonclient.grpc as grpcclient
+
+ from sklearn.datasets import fetch_california_housing
+ from sklearn.model_selection import train_test_split
+ from sklearn.metrics import mean_squared_error
+
+
+ def make_prediction(model_server, model_name, model_version, verbose):
+     try:
+         triton_client = grpcclient.InferenceServerClient(url=model_server, verbose=verbose)
+     except Exception as e:
+         print("channel creation failed: " + str(e))
+         sys.exit(1)
+     # Infer
+     inputs = []
+     outputs = []
+     # Load the California Housing dataset
+     california = fetch_california_housing()
+     X, y = california.data, california.target
+     # Split the dataset into training and testing sets (same split as train.py)
+     _, X_test, _, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
+     input_data = X_test.astype(np.float32)
+     input_label = y_test.astype(np.float32)
+     print(f'input_data:\n{input_data[0]}')
+     print(f'input_label:\n{input_label[0]}')
+     # input_data = np.expand_dims(input_data, axis=0)
+     # Initialize the input and requested output tensors
+     inputs.append(grpcclient.InferInput('float_input', [input_data.shape[0], input_data.shape[1]], "FP32"))
+     inputs[0].set_data_from_numpy(input_data)
+     outputs.append(grpcclient.InferRequestedOutput('variable'))
+     # Run inference with the requested outputs
+     results = triton_client.infer(model_name=model_name, model_version=model_version, inputs=inputs, outputs=outputs)
+     # print("response:\n", results.get_response())
+     statistics = triton_client.get_inference_statistics(model_name=model_name)
+     # print("statistics:\n", statistics)
+     if len(statistics.model_stats) != 1:
+         print("FAILED: Inference Statistics")
+         sys.exit(1)
+     # Get the output arrays from the results
+     y_pred = results.as_numpy('variable').squeeze()
+     print(f"y_pred:\n{y_pred[0]}")
+     mse = mean_squared_error(y_test, y_pred)
+     print(f'Mean Squared Error: {mse}')
+
+
+ """
+ python client.py --model_server localhost:8001 --model_name gradient_boosting_regressor --model_version 1
+ """
+ if __name__ == "__main__":
+     parser = argparse.ArgumentParser(description="Make predictions using a specific model.")
+     parser.add_argument("--model_server", default="localhost:8001", help="The address of the model server.")
+     parser.add_argument("--model_name", default="gradient_boosting_regressor", help="The name of the model to use.")
+     parser.add_argument("--model_version", default="1", help="The version of the model to use.")
+     parser.add_argument("--verbose", action="store_true", required=False, default=False, help='Enable verbose output')
+     args = parser.parse_args()
+     make_prediction(args.model_server, args.model_name, args.model_version, args.verbose)
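Note: client.py assumes a Triton Inference Server is already running at localhost:8001 (gRPC) with the model loaded from the repository layout added below. As an illustrative sketch (not part of this commit), the same tritonclient API can be used to verify the server and model are ready before running the client:

#!/usr/bin/env python3
# check_ready.py -- illustrative sketch, not part of this commit
import sys
import tritonclient.grpc as grpcclient

# Connect to the Triton server over gRPC (same default address as client.py)
client = grpcclient.InferenceServerClient(url="localhost:8001")

# Fail early if the server or the model is not ready to serve requests
if not client.is_server_ready():
    print("server is not ready")
    sys.exit(1)
if not client.is_model_ready("gradient_boosting_regressor"):
    print("model 'gradient_boosting_regressor' is not ready")
    sys.exit(1)
print("server and model are ready")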
scikit-learn/gradient_boosting_regressor/convert2onnx.sh ADDED
@@ -0,0 +1 @@
+ python ../convert2onnx.py california gradient_boosting_regressor.joblib gradient_boosting_regressor.onnx
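The ../convert2onnx.py helper invoked above is not included in this commit. The tensor names that appear later in the diff ('float_input' in config.pbtxt, 'variable' in client.py) are the defaults produced by skl2onnx, so presumably the script performs a conversion along these lines (a minimal sketch under that assumption; file names are taken from the command above):

import joblib
from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import FloatTensorType

# Load the trained scikit-learn model saved by train.py
model = joblib.load("gradient_boosting_regressor.joblib")

# California Housing has 8 features; keep the batch dimension dynamic
initial_types = [("float_input", FloatTensorType([None, 8]))]

# Convert the model to ONNX and write it to disk
onnx_model = convert_sklearn(model, initial_types=initial_types)
with open("gradient_boosting_regressor.onnx", "wb") as f:
    f.write(onnx_model.SerializeToString())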
scikit-learn/gradient_boosting_regressor/gradient_boosting_regressor.joblib ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe77c60b969a21089e927061d442b5752cffabf8ac32b522b887928fa1bd585a
+ size 1019468
scikit-learn/gradient_boosting_regressor/gradient_boosting_regressor.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b63d719c545722eb97b2f74ff5de0932fc39b46f47f504ed15cddb29c1396e08
+ size 411998
scikit-learn/gradient_boosting_regressor/gradient_boosting_regressor.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d17ef5133e1d83847fc1d6efc4067fb9fa790bd8541e6f6d09c1afabe6f59eaf
+ size 59217
scikit-learn/gradient_boosting_regressor/gradient_boosting_regressor/1/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b63d719c545722eb97b2f74ff5de0932fc39b46f47f504ed15cddb29c1396e08
+ size 411998
scikit-learn/gradient_boosting_regressor/gradient_boosting_regressor/config.pbtxt ADDED
@@ -0,0 +1,18 @@
+ name: "gradient_boosting_regressor"
+ backend: "onnxruntime"
+ max_batch_size: 0
+ input [
+   {
+     name: "float_input"
+     data_type: TYPE_FP32
+     dims: [ 8 ]
+   }
+ ]
+ output [
+ ]
+ instance_group [
+   {
+     count: 1
+     kind: KIND_CPU
+   }
+ ]
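The output section of config.pbtxt is left empty, so the onnxruntime backend presumably completes the output specification from the ONNX graph itself; client.py requests the regressor output under its skl2onnx default name 'variable'. A small sketch (not part of this commit) for inspecting the tensor names the exported model actually exposes:

import onnx

# Load the exported model and list its graph inputs and outputs
model = onnx.load("gradient_boosting_regressor.onnx")
print("inputs :", [tensor.name for tensor in model.graph.input])
print("outputs:", [tensor.name for tensor in model.graph.output])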
scikit-learn/gradient_boosting_regressor/model_packaging.sh ADDED
@@ -0,0 +1 @@
+ python ../model_packaging.py .
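../model_packaging.py is likewise not part of this commit. Judging from the files added here, it lays out the Triton model repository entry (gradient_boosting_regressor/config.pbtxt plus gradient_boosting_regressor/1/model.onnx) and produces gradient_boosting_regressor.zip; a rough sketch under that assumption:

import shutil
from pathlib import Path

model_name = "gradient_boosting_regressor"

# Triton expects <model_name>/config.pbtxt and <model_name>/<version>/model.onnx
version_dir = Path(model_name) / "1"
version_dir.mkdir(parents=True, exist_ok=True)
shutil.copy(f"{model_name}.onnx", version_dir / "model.onnx")

# Archive the repository entry as <model_name>.zip
shutil.make_archive(model_name, "zip", root_dir=".", base_dir=model_name)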
scikit-learn/gradient_boosting_regressor/predict.py ADDED
@@ -0,0 +1,31 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+
+ import joblib
+
+ from sklearn.datasets import fetch_california_housing
+ from sklearn.model_selection import train_test_split
+ from sklearn.metrics import mean_squared_error
+
+ # Load the model from disk
+ loaded_model = joblib.load('gradient_boosting_regressor.joblib')
+
+ # Set the random seed
+ random_seed = 0
+
+ # Load the dataset
+ dataset = fetch_california_housing()
+ X, y = dataset.data, dataset.target
+
+ # Split the dataset into training and testing sets
+ _, X_test, _, y_test = train_test_split(X, y, test_size=0.25, random_state=random_seed)
+ print(f'X_test:\n{X_test[0]}')
+ print(f'y_test:\n{y_test[0]}')
+
+ # Use the model to make predictions on the test data
+ y_pred = loaded_model.predict(X_test)
+ print(f'y_pred:\n{y_pred[0]}')
+
+ # Score the model using mean squared error
+ mse = mean_squared_error(y_test, y_pred)
+ print(f'Mean Squared Error: {mse}')
scikit-learn/gradient_boosting_regressor/train.py ADDED
@@ -0,0 +1,27 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+
+ import joblib
+ import numpy as np
+
+ from sklearn.ensemble import GradientBoostingRegressor
+ from sklearn.datasets import fetch_california_housing
+ from sklearn.model_selection import train_test_split
+
+ # Set the random seed
+ random_seed = 0
+ np.random.seed(random_seed)
+
+ # Load the dataset
+ dataset = fetch_california_housing()
+ X, y = dataset.data, dataset.target
+
+ # Split the dataset into training and testing sets
+ X_train, _, y_train, _ = train_test_split(X, y, test_size=0.25, random_state=random_seed)
+
+ # Create and train model
+ model = GradientBoostingRegressor(n_estimators=100, max_depth=6, random_state=42)
+ model.fit(X_train, y_train)
+
+ # Save the trained model to disk
+ joblib.dump(model, 'gradient_boosting_regressor.joblib')