# Source: Hugging Face Space file "app.py" by Manvikk (commit c98c26e, verified).
from flask import Flask, request, jsonify
import json
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
app = Flask(__name__)

# --- Load AI model ----------------------------------------------------------
# zephyr-7b-beta is a 7B-parameter causal LM; loading it takes significant
# time and memory, so it is done once at import time and shared by requests.
model_name = "HuggingFaceH4/zephyr-7b-beta"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()  # inference only; make the mode explicit (disables dropout etc.)

# --- Load personal data -----------------------------------------------------
# data.json must provide at least the key "assistant_name", which
# get_ai_response() reads. Explicit UTF-8 avoids platform-default decode
# errors on non-ASCII content.
with open("data.json", "r", encoding="utf-8") as f:
    user_data = json.load(f)
def get_ai_response(user_message):
    """Generate an AI reply to *user_message* using the loaded model.

    Returns the decoded model output prefixed with the assistant's name
    (read from ``user_data["assistant_name"]``).
    """
    # Move the encoded inputs onto the same device as the model so this
    # works whether the model lives on CPU or GPU.
    inputs = tokenizer(user_message, return_tensors="pt").to(model.device)
    # inference_mode(): no autograd bookkeeping during generation.
    with torch.inference_mode():
        # max_new_tokens caps only the generated text; the original
        # max_length=200 capped prompt + reply together, so long prompts
        # could silently truncate the answer to nothing.
        outputs = model.generate(**inputs, max_new_tokens=200)
    # Decode only the newly generated tokens — decoding the full sequence
    # would echo the user's prompt back in the reply.
    prompt_len = inputs["input_ids"].shape[1]
    reply = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
    return f"This is {user_data['assistant_name']}: {reply}"
@app.route("/reply", methods=["POST"])
def reply():
    """POST /reply — JSON body {"message": str} -> JSON {"reply": str}."""
    # silent=True makes get_json return None (instead of raising a 400 or
    # returning None that crashes .get) when the body is missing or is not
    # valid JSON; the `or {}` fallback routes all of those cases to the
    # canned response below.
    data = request.get_json(silent=True) or {}
    user_message = data.get("message", "")
    if not user_message:
        # Canned response for empty/absent messages (text preserved verbatim).
        return jsonify({"reply": "I will answer later."})
    ai_reply = get_ai_response(user_message)
    return jsonify({"reply": ai_reply})
if __name__ == "__main__":
    # Bind to every interface so the containerized service is reachable
    # from outside; 7860 is the conventional Hugging Face Spaces port.
    bind_host, bind_port = "0.0.0.0", 7860
    app.run(host=bind_host, port=bind_port)