Ashrafb committed
Commit eb42175
Parent: af2ea51

Update main.py

Files changed (1)
  1. main.py +6 -8
main.py CHANGED
@@ -1,10 +1,8 @@
-from fastapi import FastAPI, File, UploadFile, HTTPException
-from fastapi.responses import JSONResponse, FileResponse, StreamingResponse
+from fastapi import FastAPI, Request, HTTPException
+from fastapi.responses import JSONResponse, FileResponse
 from fastapi.staticfiles import StaticFiles
-from fastapi.templating import Jinja2Templates
-from fastapi import Request, Form
 from huggingface_hub import InferenceClient
-import random
+import json
 
 app = FastAPI()
 
@@ -23,7 +21,7 @@ MAX_TOKENS = 2000
 TEMPERATURE = 0.7
 TOP_P = 0.95
 
-def respond(message, history: list[tuple[str, str]]):
+async def respond(message, history: list[tuple[str, str]]):
     messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
 
     for val in history:
@@ -51,12 +49,12 @@ def respond(message, history: list[tuple[str, str]]):
 async def generate(request: Request):
     form = await request.form()
     prompt = form.get("prompt")
-    history = form.get("history", "[]")  # Default to empty history
+    history = json.loads(form.get("history", "[]"))  # Default to empty history
 
     if not prompt:
         raise HTTPException(status_code=400, detail="Prompt is required")
 
-    response_generator = respond(prompt, eval(history))  # Converting string back to list
+    response_generator = respond(prompt, history)
     final_response = ""
     async for part in response_generator:
         final_response += part
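
The substantive fix in this commit is decoding the history form field with json.loads instead of eval: json.loads treats client-supplied text strictly as data, while eval executes it as Python inside the request handler. A small self-contained illustration of the difference (the sample payloads below are made up for demonstration; on the client side the matching encoder would be json.dumps(history)):

import json

# A well-formed history payload, as a client would produce with json.dumps(...)
good = '[["hi", "hello there"], ["how are you?", "doing fine"]]'
print(json.loads(good))  # -> a list of [user, assistant] pairs

# A hostile payload: json.loads rejects it with a ValueError (JSONDecodeError),
# whereas eval() would import os and run arbitrary code on the server.
bad = "__import__('os').listdir('/')"
try:
    json.loads(bad)
except ValueError as exc:
    print("rejected:", exc)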
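
The other change, making respond an async def, matters because generate() consumes its return value with async for, which requires an async generator. A rough sketch of that shape, with a placeholder loop standing in for the real InferenceClient streaming call (the actual call and the SYSTEM_MESSAGE value are not shown in this diff):

import asyncio

SYSTEM_MESSAGE = "You are a helpful assistant."  # defined earlier in main.py; value assumed here

async def respond(message, history: list[tuple[str, str]]):
    # Build the chat transcript from the (user, assistant) pairs in history.
    messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Placeholder for the InferenceClient streaming call: yielding inside an
    # async def makes this an async generator, which is what async for expects.
    for token in ("streamed ", "response ", "chunks"):
        yield token

async def main():
    final_response = ""
    async for part in respond("Hello!", []):  # same consumption pattern as generate()
        final_response += part
    print(final_response)

asyncio.run(main())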