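"""gRPC client that queries a regression model served by Triton Inference Server.

Loads the California housing dataset, sends its held-out test split to the
server ("adaboost_regressor" by default), and reports the mean squared error
of the returned predictions.
"""
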
import sys
import argparse

import numpy as np

import tritonclient.grpc as grpcclient

from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error


def make_prediction(model_server, model_name, model_version, verbose):
    # Connect to the Triton Inference Server over gRPC.
    try:
        triton_client = grpcclient.InferenceServerClient(url=model_server, verbose=verbose)
    except Exception as e:
        print("channel creation failed: " + str(e))
        sys.exit(1)

    inputs = []
    outputs = []

    # Load the California housing dataset and hold out a test split.
    california = fetch_california_housing()
    X, y = california.data, california.target

    _, X_test, _, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
    input_data = X_test.astype(np.float32)
    input_label = y_test.astype(np.float32)
    print(f'input_data:\n{input_data[0]}')
    print(f'input_label:\n{input_label[0]}')

    # Describe the model's input tensor, attach the test data, and request the output tensor.
    inputs.append(grpcclient.InferInput('float_input', [input_data.shape[0], input_data.shape[1]], "FP32"))
    inputs[0].set_data_from_numpy(input_data)
    outputs.append(grpcclient.InferRequestedOutput('variable'))

    # Run inference against the requested model version.
    results = triton_client.infer(
        model_name=model_name,
        model_version=model_version,
        inputs=inputs,
        outputs=outputs,
    )

    # Sanity-check that the server reports statistics for exactly one model.
    statistics = triton_client.get_inference_statistics(model_name=model_name)
    if len(statistics.model_stats) != 1:
        print("FAILED: Inference Statistics")
        sys.exit(1)

    # Compare the predictions against the held-out labels.
    y_pred = results.as_numpy('variable').squeeze()
    print(f"y_pred:\n{y_pred[0]}")
    mse = mean_squared_error(y_test, y_pred)
    print(f'Mean Squared Error: {mse}')


"""
python client.py --model_server localhost:8001 --model_name adaboost_regressor --model_version 1
"""
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Make predictions using a specific model.")
    parser.add_argument("--model_server", default="localhost:8001", help="The address of the model server.")
    parser.add_argument("--model_name", default="adaboost_regressor", help="The name of the model to use.")
    parser.add_argument("--model_version", default="1", help="The version of the model to use.")
    parser.add_argument("--verbose", action="store_true", help="Enable verbose output.")
    args = parser.parse_args()

    make_prediction(args.model_server, args.model_name, args.model_version, args.verbose)