File size: 3,005 Bytes
567404a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import sys
import argparse
import numpy as np

import tritonclient.grpc as grpcclient

from sklearn.datasets import fetch_openml
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Fix the NumPy RNG seed so the train/test split (and any other random
# draws) are reproducible across runs; also reused as `random_state` below.
random_seed = 0
np.random.seed(random_seed)


def make_prediction(model_server, model_name, model_version, verbose):
    """Run batch inference against a Triton-served isolation-forest model.

    Fetches the OpenML "cardiotocography" dataset, binarizes the target
    (class "3" -> 1, everything else -> 0), sends the held-out test split
    to the Triton server over gRPC, and prints the accuracy of the
    returned predictions against the ground-truth labels.

    Args:
        model_server: host:port of the Triton gRPC endpoint (e.g. "localhost:8001").
        model_name: name of the deployed model to query.
        model_version: version string of the model to query ("" lets the
            server pick per its version policy).
        verbose: enable verbose gRPC client logging.

    Exits the process with status 1 if the gRPC channel cannot be created
    or the server's inference statistics look wrong.
    """
    try:
        triton_client = grpcclient.InferenceServerClient(url=model_server, verbose=verbose)
    except Exception as e:
        print("channel creation failed: " + str(e))
        sys.exit(1)
    # Load the dataset
    dataset_name = "cardiotocography"
    dataset = fetch_openml(name=dataset_name, version=1, as_frame=False)
    X, y = dataset.data, dataset.target
    # Binarize the multi-class target: class "3" -> 1, all others -> 0.
    y = (y == "3").astype(int)
    # Split the dataset into training and testing sets (seeded for reproducibility)
    _, X_test, _, y_test = train_test_split(X, y, test_size=0.25, random_state=random_seed)
    # Triton expects FP32 tensors to match the model's declared input type.
    input_data = X_test.astype(np.float32)
    input_label = y_test.astype(np.float32)
    print(f'input_data:\n{input_data[0]}')
    print(f'input_label:\n{input_label[0]}')
    # Initialize the request: one 2-D input tensor, one requested output.
    inputs = [grpcclient.InferInput('float_input', [input_data.shape[0], input_data.shape[1]], "FP32")]
    inputs[0].set_data_from_numpy(input_data)
    outputs = [grpcclient.InferRequestedOutput('label')]
    # BUG FIX: model_version was accepted but never passed through, so the
    # --model_version CLI flag had no effect. Forward it to the server.
    results = triton_client.infer(model_name=model_name,
                                  model_version=model_version,
                                  inputs=inputs,
                                  outputs=outputs)
    statistics = triton_client.get_inference_statistics(model_name=model_name)
    # Sanity check: exactly one model's stats should come back for this name.
    if len(statistics.model_stats) != 1:
        print("FAILED: Inference Statistics")
        sys.exit(1)
    # Get the output arrays from the results
    y_pred = results.as_numpy('label').squeeze()
    # IsolationForest convention: 1 = inlier, -1 = outlier. Map onto the
    # dataset's 0/1 labels (1 -> 0 normal, -1 -> 1 anomaly).
    y_pred = np.where(y_pred == 1, 0, 1)
    print(f'y_pred:\n{y_pred[0]}')
    # Score the model using accuracy classification score
    acc = accuracy_score(y_test, y_pred)
    print(f'Accuracy classification score: {acc}')


"""
python client.py --model_server localhost:8001 --model_name isolation_forest --model_version 1
"""
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Make predictions using a specific model.")
    parser.add_argument("--model_server", default="localhost:8001", help="The address of the model server.")
    parser.add_argument("--model_name", default="isolation_forest", help="The name of the model to use.")
    parser.add_argument("--model_version", default="1", help="The version of the model to use.")
    parser.add_argument("--verbose", action="store_true", required=False, default=False, help='Enable verbose output')
    args = parser.parse_args()
    make_prediction(args.model_server, args.model_name, args.model_version, args.verbose)