william4416 committed on
Commit
5ce1dbc
1 Parent(s): 1c2638a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -11
app.py CHANGED
@@ -1,9 +1,10 @@
1
- from flask import Flask, request, jsonify
 
2
  from transformers import AutoModelForCausalLM, AutoTokenizer
3
  import torch
4
  import json
5
 
6
- app = Flask(__name__)
7
 
8
  # Load DialoGPT model and tokenizer
9
  tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
@@ -13,7 +14,10 @@ model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
13
  with open("uts_courses.json", "r") as file:
14
  courses_data = json.load(file)
15
 
16
- def generate_response(user_input):
 
 
 
17
  if user_input.lower() == "help":
18
  return "I can help you with information about UTS courses. Feel free to ask!"
19
  elif user_input.lower() == "exit":
@@ -32,11 +36,7 @@ def generate_response(user_input):
32
  response = tokenizer.decode(response_ids[0], skip_special_tokens=True)
33
  return response
34
 
35
- @app.route("/", methods=["POST"])
36
- def chat():
37
- user_input = request.json["user_input"]
38
- response = generate_response(user_input)
39
- return jsonify({"response": response})
40
-
41
- if __name__ == "__main__":
42
- app.run(debug=True)
 
1
+ from fastapi import FastAPI
2
+ from pydantic import BaseModel
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
  import torch
5
  import json
6
 
7
+ app = FastAPI()
8
 
9
  # Load DialoGPT model and tokenizer
10
  tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
 
14
  with open("uts_courses.json", "r") as file:
15
  courses_data = json.load(file)
16
 
17
+ class UserInput(BaseModel):
18
+ user_input: str
19
+
20
+ def generate_response(user_input: str):
21
  if user_input.lower() == "help":
22
  return "I can help you with information about UTS courses. Feel free to ask!"
23
  elif user_input.lower() == "exit":
 
36
  response = tokenizer.decode(response_ids[0], skip_special_tokens=True)
37
  return response
38
 
39
+ @app.post("/")
40
+ def chat(user_input: UserInput):
41
+ response = generate_response(user_input.user_input)
42
+ return {"response": response}