mitulagr2 committed on
Commit d14b0f7 · 1 Parent(s): bb7b1b4
Files changed (2)
  1. app/main.py +2 -65
  2. app/rag.py +0 -3
app/main.py CHANGED
@@ -1,11 +1,8 @@
 import os
 import shutil
-import json
-import asyncio
 import time
-from datetime import datetime
 from typing import List
-from fastapi import FastAPI, UploadFile, WebSocket, WebSocketDisconnect
+from fastapi import FastAPI, UploadFile
 from fastapi.middleware import Middleware
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import StreamingResponse
@@ -25,60 +22,12 @@ app = FastAPI(middleware=middleware)
 files_dir = os.path.expanduser("~/wtp_be_files/")
 session_assistant = ChatPDF()
 
-class ConnectionManager:
-    def __init__(self):
-        self.active_connections: List[WebSocket] = []
-
-    async def connect(self, websocket: WebSocket):
-        await websocket.accept()
-        self.active_connections.append(websocket)
-
-    def disconnect(self, websocket: WebSocket):
-        self.active_connections.remove(websocket)
-
-    async def send_personal_message(self, message: str, websocket: WebSocket):
-        await websocket.send_text(message)
-
-    async def broadcast(self, message: str):
-        for connection in self.active_connections:
-            await connection.send_text(message)
-
-manager = ConnectionManager()
-
-@app.websocket("/ws/{client_id}")
-async def websocket_endpoint(websocket: WebSocket, client_id: int):
-    await manager.connect(websocket)
-    now = datetime.now()
-    current_time = now.strftime("%H:%M")
-    try:
-        while True:
-            data = await websocket.receive_text()
-            data = data.strip()
-            if len(data) > 0:
-                if not session_assistant.pdf_count > 0:
-                    message = {"time":current_time,"clientId":client_id,"message":"Please, add a PDF document first."}
-                    await manager.send_personal_message(json.dumps(message), websocket)
-                else:
-                    print("FETCHING STREAM")
-                    streaming_response = session_assistant.ask(data)
-                    print("STARTING STREAM")
-                    for text in streaming_response.response_gen:
-                        message = {"time":current_time,"clientId":client_id,"message":text}
-                        # await manager.broadcast(json.dumps(message))
-                        await manager.send_personal_message(json.dumps(message), websocket)
-                    print("ENDING STREAM")
-    except WebSocketDisconnect:
-        manager.disconnect(websocket)
-        message = {"time":current_time,"clientId":client_id,"message":"Offline"}
-        await manager.broadcast(json.dumps(message))
-
 
 def astreamer(generator):
     t0 = time.time()
     for i in generator:
         print(f"Chunk being yielded (time {int((time.time()-t0)*1000)}ms)", flush=True)
         yield i
-        # time.sleep(0.5)
     print(f"Over (time {int((time.time()-t0)*1000)}ms)", flush=True)
 
 
@@ -87,19 +36,7 @@ async def process_input(text: str):
     if text and len(text.strip()) > 0:
         text = text.strip()
         streaming_response = session_assistant.ask(text)
-        headers = {
-            'X-Content-Type-Options': 'nosniff',
-            'Content-Type': 'text/event-stream',
-            'Cache-Control': 'no-cache',
-            'Content-Encoding': 'none',
-            'Connection': 'keep-alive',
-            'Access-Control-Allow-Origin': '*'
-        }
-        return StreamingResponse(
-            astreamer(streaming_response.response_gen),
-            headers=headers,
-            media_type='text/event-stream'
-        )
+        return StreamingResponse(astreamer(streaming_response.response_gen), media_type='text/event-stream')
 
 
 @app.post("/upload")
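
This commit drops the WebSocket chat path entirely and keeps only the HTTP streaming endpoint, so a client now reads the answer as a plain streamed response body instead of per-message WebSocket frames. A minimal client sketch follows; the "/query" route path and the "text" query parameter are assumptions, since the decorator for process_input sits outside the hunks shown here.

# Hypothetical client for the remaining HTTP streaming endpoint.
# The "/query" path and the "text" query parameter are assumptions;
# only the body of process_input is visible in this diff.
import requests

def stream_answer(question: str, base_url: str = "http://localhost:8000") -> None:
    with requests.post(f"{base_url}/query", params={"text": question}, stream=True) as resp:
        resp.raise_for_status()
        # StreamingResponse sends text chunks as astreamer() yields them.
        for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
            if chunk:
                print(chunk, end="", flush=True)

if __name__ == "__main__":
    stream_answer("What is this PDF about?")
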
app/rag.py CHANGED
@@ -6,7 +6,6 @@ from llama_index.core import (
     StorageContext,
     Settings,
     get_response_synthesizer)
-# from llama_index.legacy.readers.file.base import SimpleDirectoryReader
 from llama_index.core.node_parser import SentenceSplitter
 from llama_index.core.schema import TextNode, MetadataMode
 from llama_index.core.vector_stores import VectorStoreQuery
@@ -109,9 +108,7 @@ class ChatPDF:
 
     def ask(self, query: str):
         logger.info("retrieving the response to the query")
-        print("GENERATING STREAM")
         streaming_response = self.query_engine.query(query)
-        print("PASSING STREAM")
         return streaming_response
 
     def clear(self):
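
ask() hands back the streaming response object whose response_gen main.py iterates. For that generator to exist, the query engine's response synthesizer has to be built with streaming enabled; a minimal sketch of such a setup, assuming helper and attribute names (ChatPDF's actual constructor is not part of this diff):

# Sketch only: assumes a retriever is already built elsewhere in ChatPDF.
from llama_index.core import get_response_synthesizer
from llama_index.core.query_engine import RetrieverQueryEngine

def build_streaming_query_engine(retriever):
    # streaming=True makes query() return a streaming response whose
    # .response_gen yields text deltas, which main.py's astreamer() consumes.
    synthesizer = get_response_synthesizer(streaming=True)
    return RetrieverQueryEngine(retriever=retriever, response_synthesizer=synthesizer)
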