from fastapi import FastAPI
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
import openai

# Only needed if the commented-out local-model path below is enabled:
# from transformers import AutoModelForCausalLM, AutoTokenizer

from helper import get_response_from_model

app = FastAPI()

app.mount("/static", StaticFiles(directory="static"), name="static")

class InputData(BaseModel):
    user_input: str
    api_key: str

@app.get("/", response_class=HTMLResponse)
async def read_root():
    with open("static/index.html", "r") as f:
        content = f.read()
    return HTMLResponse(content=content)

# Initialize model and tokenizer
# tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-14B-Chat-int4")
# model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen-14B-Chat-int4").eval()
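#
# A hedged sketch of how the local-model path might be wired up if enabled.
# Qwen chat checkpoints document a custom `model.chat` interface and generally
# require `trust_remote_code=True` when loading; details can vary by model
# revision, and `get_local_response` is a hypothetical helper name:
#
# tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-14B-Chat-int4", trust_remote_code=True)
# model = AutoModelForCausalLM.from_pretrained(
#     "Qwen/Qwen-14B-Chat-int4", device_map="auto", trust_remote_code=True
# ).eval()
#
# def get_local_response(user_input: str) -> str:
#     # model.chat returns (response_text, updated_history)
#     response, _history = model.chat(tokenizer, user_input, history=None)
#     return response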

@app.post("/chat/")
def chat(input_data: InputData):
    print("input_data: ", input_data)
    user_input = input_data.user_input
    api_key = input_data.api_key

    # Set the module-level OpenAI API key; the key is not passed to
    # get_response_from_model, so the helper presumably relies on this global.
    openai.api_key = api_key

    response = get_response_from_model(user_input)

    return {"response": response}

    # return {"response": f"user input: {input_data.user_input}, api_key: {input_data.api_key}"}