Update main.py
main.py
CHANGED
@@ -94,98 +94,92 @@ def marketing_rec():
 
     return jsonify(str(response['text']))
 
+
+# Profit/Customer Engagement Prediction endpoint
 @app.route("/predict_metric", methods=["POST"])
 @cross_origin()
 def predict_metric():
-    ...
-        income_data =
-    ...
-        predictions = [{"date": row['ds'].strftime('%Y-%m-%d'), "value": row['yhat']} for _, row in forecast_data.iterrows()]
-        logging.info(f"Predictions: {predictions}")
-
-        return jsonify({"predictedData": predictions})
-
-    except Exception as e:
-        logging.error(f"Error in /predict_metric endpoint: {e}")
-        return jsonify({"error": str(e)}), 500
+    request_data = request.json
+    user_id = request_data.get("user_id")
+    interval = request_data.get("interval", 30)
+    metric_type = request_data.get("metric_type", "Profit")  # "Profit" or "Customer Engagement"
+
+    transactions_ref = db.collection("system_users").document(user_id).collection("transactions")
+
+    data = []
+
+    if metric_type == "Profit":
+        # Fetch both Income and Expense transactions for Profit calculation
+        income_query = transactions_ref.where("transactionType", "==", "Income").stream()
+        expense_query = transactions_ref.where("transactionType", "==", "Expense").stream()
+
+        income_data = {}
+        expense_data = {}
+
+        for doc in income_query:
+            transaction = doc.to_dict()
+            date_str = transaction["date"]
+            amount = transaction["amountDue"]
+            income_data[date_str] = income_data.get(date_str, 0) + amount
+
+        for doc in expense_query:
+            transaction = doc.to_dict()
+            date_str = transaction["date"]
+            amount = transaction["amountDue"]
+            expense_data[date_str] = expense_data.get(date_str, 0) + amount
+
+        # Calculate net profit for each date
+        for date, income in income_data.items():
+            expense = expense_data.get(date, 0)
+            data.append({"date": date, "amountDue": income - expense})
+
+    elif metric_type == "Customer Engagement":
+        # Use count of Income transactions per day as Customer Engagement
+        income_query = transactions_ref.where("transactionType", "==", "Income").stream()
+
+        engagement_data = {}
+        for doc in income_query:
+            transaction = doc.to_dict()
+            date_str = transaction["date"]
+            engagement_data[date_str] = engagement_data.get(date_str, 0) + 1
+
+        for date, count in engagement_data.items():
+            data.append({"date": date, "amountDue": count})
+
+    # Create DataFrame from the aggregated data
+    df = pd.DataFrame(data)
+
+    # Ensure 'date' column is datetime
+    df['date'] = pd.to_datetime(df['date'])
+    df['date'] = df['date'].dt.tz_localize(None)
+
+    # Set 'date' as index
+    df = df.sort_values("date").set_index("date")
+
+    # Resample daily to ensure regular intervals (fill missing dates)
+    df = df.resample("D").sum().reset_index()
+
+    df.columns = ["ds", "y"]  # ds: date, y: target
+
+    # Check if there's enough data to train the model
+    if df.shape[0] < 10:
+        return jsonify({"error": "Not enough data for prediction"})
+
+    # Initialize and fit the Prophet model
+    model = Prophet(daily_seasonality=True, yearly_seasonality=True)
+    model.fit(df)
+
+    # DataFrame for future predictions
+    future_dates = model.make_future_dataframe(periods=interval)
+    forecast = model.predict(future_dates)
+
+    # Extract the forecast for the requested interval
+    forecast_data = forecast[['ds', 'yhat']].tail(interval)
+    predictions = [{"date": row['ds'].strftime('%Y-%m-%d'), "value": row['yhat']} for _, row in forecast_data.iterrows()]
+
+    # Return predictions in JSON format
+    return jsonify({"predictedData": predictions})
+
 
 
 
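For reference, a minimal sketch of how a client could call the new endpoint added in this commit. The base URL and user id below are placeholders (not values from this repo); the request and response fields mirror what predict_metric() reads from request.json and returns via jsonify.

import requests

# Hypothetical base URL of the deployed Space; replace with the real host.
BASE_URL = "https://example-space.hf.space"

# Body fields read by predict_metric(): user_id (Firestore document id),
# interval (forecast horizon in days), metric_type ("Profit" or "Customer Engagement").
payload = {
    "user_id": "demo-user-id",   # placeholder, not a real Firestore document
    "interval": 30,
    "metric_type": "Profit",
}

resp = requests.post(f"{BASE_URL}/predict_metric", json=payload)
resp.raise_for_status()
body = resp.json()

# On success the endpoint returns {"predictedData": [{"date": "YYYY-MM-DD", "value": float}, ...]};
# with fewer than 10 aggregated rows it returns {"error": "Not enough data for prediction"}.
for point in body.get("predictedData", []):
    print(point["date"], round(point["value"], 2))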