AlbertoNuin committed on
Commit
f30eab9
·
verified ·
1 Parent(s): 5a5fe33

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +16 -0
  2. app.py +91 -0
  3. requirements.txt +13 -0
  4. xgb_tuned.joblib +3 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy all files from the current directory to the container's working directory
COPY . .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Define the command to start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:superkart_api`: Runs the Flask instance named `superkart_api` defined in `app.py`
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:superkart_api"]
app.py ADDED
@@ -0,0 +1,91 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 

# Import necessary libraries
import numpy as np  # NOTE(review): not referenced in this module — candidate for removal
import joblib # For loading the serialized model
import pandas as pd # For data manipulation
from flask import Flask, request, jsonify # For creating the Flask API
from pathlib import Path # For using a robust, absolute path

# Base directory of this script, so the model path resolves regardless of the
# process's current working directory (important under gunicorn/Docker)
BASE_DIR = Path(__file__).resolve().parent

# Full path to the serialized XGBoost model shipped alongside this file
MODEL_PATH = BASE_DIR / "xgb_tuned.joblib"

# Initialize the Flask application.
# The instance is named `superkart_api`; the Docker CMD targets `app:superkart_api`.
superkart_api = Flask("SuperKart Sales Predictor")

# Load the trained machine learning model once at import time,
# shared by all request handlers below
model = joblib.load(MODEL_PATH)
# Root endpoint: simple liveness / welcome message (GET request)
@superkart_api.get('/')
def home():
    """Handle GET requests to '/' by returning a plain welcome string."""
    return "Welcome to the SuperKart Sales Predictor API !"
29
+
30
+ # Define an endpoint to predict for a single observation
31
+ @superkart_api.post('/v1/predict')
32
+ def predict_sales():
33
+ """
34
+ This function handles POST requests to the '/v1/predict' endpoint.
35
+ It expects a JSON payload containing property details and returns
36
+ the predicted rental price as a JSON response.
37
+ """
38
+ # Get JSON data from the request
39
+ data = request.get_json()
40
+
41
+ # Extract relevant customer features from the input data. The order of the column names matters.
42
+ sample = {
43
+ 'Product_Weight': data['Product_Weight'],
44
+ 'Product_MRP': data['Product_MRP'],
45
+ 'Product_Allocated_Area': data['Product_Allocated_Area'],
46
+ 'Product_Sugar_Content': data['Product_Sugar_Content'],
47
+ 'Store_Size': data['Store_Size'],
48
+ 'Store_Location_City_Type': data['Store_Location_City_Type'],
49
+ 'Store_Type': data['Store_Type'],
50
+ 'Store_Age_Years': data['Store_Age_Years'],
51
+ 'Product_Id_prefix': data['Product_Id_prefix'],
52
+ 'Product_FD_perishable': data['Product_FD_perishable'],
53
+ }
54
+
55
+ # Convert the extracted data into a DataFrame
56
+ input_data = pd.DataFrame([sample])
57
+
58
+ # Make a store sales prediction using the trained model
59
+ prediction = model.predict(input_data).tolist()[0]
60
+
61
+ # Return the prediction as a JSON response
62
+ return jsonify({'Sales': prediction})
63
+
# Define an endpoint for batch prediction (POST request)
@superkart_api.post('/v1/batch')
def predict_sales_batch():
    """
    Handle POST requests to the '/v1/batch' endpoint.

    Expects a multipart upload named 'file' containing a CSV of product/store
    records and returns a JSON object mapping each row's Product_Id to its
    predicted sales value.
    """
    # Get the uploaded CSV file from the request
    # (a missing 'file' part raises BadRequestKeyError — Flask turns it into a 400)
    file = request.files['file']

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # Make predictions for all rows in the DataFrame.
    # NOTE(review): the full CSV — including the Product_Id column read below —
    # is passed to model.predict; this assumes the saved pipeline drops or
    # ignores extra columns. Confirm against the training code.
    predicted_sales = model.predict(input_data).tolist()

    # Create a dictionary of predictions keyed by each row's Product_Id
    product_ids = input_data['Product_Id'].tolist()
    output_dict = dict(zip(product_ids, predicted_sales))

    # Return the predictions dictionary (Flask serializes dicts to JSON)
    return output_dict

# Run the Flask development server when this file is executed directly.
# In the Docker image the app is instead served by gunicorn (see Dockerfile CMD).
if __name__ == '__main__':
    # NOTE(review): debug=True enables the interactive debugger and reloader —
    # intended for local development only; never expose it in production.
    superkart_api.run(debug=True)
requirements.txt ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Data / modeling
pandas==2.2.2
numpy==2.0.2
scikit-learn==1.6.1
seaborn==0.13.2
joblib==1.4.2
xgboost==2.1.4

# Web serving
Werkzeug==2.2.2
flask==2.2.2
gunicorn==20.1.0
requests==2.32.3
uvicorn[standard]

# UI
streamlit==1.43.2
xgb_tuned.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5a34eaf37d4a092b2e7be417213f8644f5147374e5db932101cd8b480d64bd2e
3
+ size 120950