handhandlab committed
Commit 1919d23
Parent(s): 228da36

request and response

Files changed (1)
  1. llm.py +7 -7
llm.py CHANGED
@@ -11,18 +11,18 @@ llm = Llama.from_pretrained(
 app = FastAPI()
 
 class ChatRequest(BaseModel):
-    message: str
+    messages: list[dict]
 
 @app.post("/chat")
 async def chat_completion(request: ChatRequest):
+    # print(request.messages)
+    # test = [{"role": "user", "content": "dsfa"}]
+    # print(test)
+    # return "haha"
     try:
         response = llm.create_chat_completion(
-            messages=[
-                {"role": "user", "content": request.message}
-            ]
+            messages = request.messages
         )
-        return {
-            "response": response['choices'][0]['message']['content']
-        }
+        return response
     except Exception as e:
         raise HTTPException(status_code=500, detail=str(e))
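
After this change the /chat endpoint expects the full chat history as a list of message dicts and returns the raw create_chat_completion() result instead of just the assistant text. A minimal client sketch follows; the server URL/port, running the app with uvicorn, and the use of the requests library are assumptions for illustration, not part of the commit.

# Client sketch for the updated /chat endpoint.
# Assumes the FastAPI app is served locally, e.g. `uvicorn llm:app --port 8000`
# (host, port, and `requests` are assumptions, not part of the commit).
import requests

payload = {
    "messages": [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ]
}

resp = requests.post("http://localhost:8000/chat", json=payload)
resp.raise_for_status()

data = resp.json()
# The endpoint now returns the raw completion object, so the assistant
# text is under choices[0]["message"]["content"], as in the old handler.
print(data["choices"][0]["message"]["content"])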