# Import the libraries
import os
import uuid
import json
from pathlib import Path

import gradio as gr
import joblib
import pandas as pd
from huggingface_hub import CommitScheduler

# Load the freshly trained model from disk
model = joblib.load('model.joblib')

# Prepare the logging functionality: each app instance writes to its own JSON-lines file
log_file = Path("logs/") / f"data_{uuid.uuid4()}.json"
log_folder = log_file.parent

# Periodically commit the local log folder to a Hugging Face dataset repo
scheduler = CommitScheduler(
    repo_id="insurance-charge-logs",
    repo_type="dataset",
    folder_path=log_folder,
    path_in_repo="data",
    every=2  # commit every 2 minutes
)

# Define the input components
age_input = gr.Slider(minimum=18, maximum=64, step=1, label='Age')
sex_input = gr.Dropdown(['female', 'male'], label='Sex')
bmi_input = gr.Slider(minimum=15, maximum=50, step=1, label='BMI')
smoker_input = gr.Dropdown(['no', 'yes'], label='Smoker')
region_input = gr.Dropdown(['northeast', 'northwest', 'southeast', 'southwest'], label='Region')

# Define the output component
model_output = gr.Label(label='Insurance Charge Prediction')


# Predict function: take the input features, convert them to a dataframe,
# and make a prediction using the saved model
def predict_insurance_charges(age, sex, bmi, smoker, region):
    # Create a single-row dataframe with the input features
    sample = {
        'age': age,
        'sex': sex,
        'bmi': bmi,
        'smoker': smoker,
        'region': region
    }
    data_point = pd.DataFrame([sample])
    prediction = model.predict(data_point).tolist()

    # Log the inputs and prediction; the scheduler's lock prevents writing
    # to the file while a background commit is in progress
    with scheduler.lock:
        with log_file.open("a") as f:
            f.write(json.dumps(
                {
                    'age': age,
                    'sex': sex,
                    'bmi': bmi,
                    'smoker': smoker,
                    'region': region,
                    'prediction': prediction[0]
                }
            ))
            f.write("\n")

    # Return the prediction
    return prediction[0]


# Create the interface
demo = gr.Interface(
    fn=predict_insurance_charges,
    inputs=[age_input, sex_input, bmi_input, smoker_input, region_input],
    outputs=model_output,
    title="HealthyLife Insurance Charge Prediction",
    description="This API allows you to predict the... ",
    allow_flagging="auto",
    concurrency_limit=8
)

# Enable the request queue and launch the app
demo.queue()
demo.launch(share=False)
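
# Usage note (a sketch with made-up inputs): calling the predict function directly,
# e.g. predict_insurance_charges(30, 'female', 28.5, 'no', 'southeast'), returns a single
# float charge estimate and appends one JSON line to the log file. This assumes
# 'model.joblib' is a full preprocessing + regression pipeline that accepts the raw
# numeric and string values produced by the input components above.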