Manvikk committed on
Commit
c98c26e
·
verified ·
1 Parent(s): 00e872d

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -0
app.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json

from flask import Flask, request, jsonify
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

app = Flask(__name__)

# Load the AI model once at import time: downloading/loading a 7B-parameter
# model is far too expensive to do per request.
model_name = "HuggingFaceH4/zephyr-7b-beta"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Load personal data used to personalize responses (expects at least an
# "assistant_name" key — see get_ai_response).
# Fix: declare the encoding explicitly so the JSON parses identically
# regardless of the host's locale settings.
with open("data.json", "r", encoding="utf-8") as f:
    user_data = json.load(f)
16
+
17
def get_ai_response(user_message):
    """Generate an AI reply to *user_message* with the loaded HF model.

    Returns the decoded generation prefixed with the assistant name taken
    from the module-level ``user_data`` mapping.
    """
    inputs = tokenizer(user_message, return_tensors="pt")
    # Fix: bound the number of *generated* tokens instead of the total
    # sequence length — max_length counts the prompt too, so a long prompt
    # could leave no room for a reply. Also skip autograd bookkeeping,
    # since this is inference only.
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=200)
    reply = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return f"This is {user_data['assistant_name']}: {reply}"
23
+
24
@app.route("/reply", methods=["POST"])
def reply():
    """POST /reply — answer a chat message.

    Expects a JSON body ``{"message": "..."}`` and returns
    ``{"reply": "..."}``. An empty or missing message gets a canned
    placeholder reply instead of an error.
    """
    # Fix: request.json is None (or raises, depending on Flask version)
    # when the body is missing or not valid JSON, which would make
    # data.get(...) blow up with AttributeError. get_json(silent=True)
    # degrades gracefully to an empty payload instead.
    data = request.get_json(silent=True) or {}
    user_message = data.get("message", "")

    if not user_message:
        return jsonify({"reply": "I will answer later."})

    ai_reply = get_ai_response(user_message)
    return jsonify({"reply": ai_reply})
34
+
35
def _serve():
    # Bind to all interfaces so the container's mapped port is reachable;
    # 7860 is the conventional Hugging Face Spaces port.
    app.run(host="0.0.0.0", port=7860)


if __name__ == "__main__":
    _serve()