Daniel Marques committed on
Commit
ba93db8
1 Parent(s): 760ae83

fix: add tokenWs

Browse files
Files changed (1) hide show
  1. main.py +11 -10
main.py CHANGED
@@ -31,9 +31,15 @@ class Predict(BaseModel):
31
  class Delete(BaseModel):
32
  filename: str
33
 
 
 
34
  class MyCustomHandler(BaseCallbackHandler):
35
  def on_llm_new_token(self, token: str, **kwargs) -> None:
36
- print(f" token: {token}")
 
 
 
 
37
 
38
  async def on_llm_start(
39
  self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
@@ -87,7 +93,6 @@ QA = RetrievalQA.from_chain_type(
87
  return_source_documents=SHOW_SOURCES,
88
  chain_type_kwargs={
89
  "prompt": QA_CHAIN_PROMPT,
90
- "memory": memory,
91
  },
92
  )
93
 
@@ -238,16 +243,12 @@ async def create_upload_file(file: UploadFile):
238
 
239
  @api_app.websocket("/ws")
240
  async def websocket_endpoint(websocket: WebSocket):
 
 
241
  await websocket.accept()
242
  while True:
243
  data = await websocket.receive_text()
244
 
245
- res = QA(data)
246
-
247
- qa_chain_response = res.stream(
248
- {"query": data},
249
- )
250
-
251
- print(f"{qa_chain_response} stream")
252
 
253
- await websocket.send_text(f"Message text was: {data}")
 
31
  class Delete(BaseModel):
32
  filename: str
33
 
34
+ tokenWS = ''
35
+
36
  class MyCustomHandler(BaseCallbackHandler):
37
  def on_llm_new_token(self, token: str, **kwargs) -> None:
38
+ global tokenWS
39
+
40
+ tokenWS = token
41
+
42
+ print(f" token: {tokenWS}")
43
 
44
  async def on_llm_start(
45
  self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
 
93
  return_source_documents=SHOW_SOURCES,
94
  chain_type_kwargs={
95
  "prompt": QA_CHAIN_PROMPT,
 
96
  },
97
  )
98
 
 
243
 
244
  @api_app.websocket("/ws")
245
  async def websocket_endpoint(websocket: WebSocket):
246
+ global QA
247
+
248
  await websocket.accept()
249
  while True:
250
  data = await websocket.receive_text()
251
 
252
+ QA(data)
 
 
 
 
 
 
253
 
254
+ await websocket.send_text(f"Message text was: {tokenWS}")