:recycle: [Refactor] OpenaiStreamOutputer: Unify JSON output formatters
- conversations/conversation_connector.py +5 -11
- networks/__init__.py +1 -1
- networks/message_outputer.py +2 -7
- networks/message_parser.py +10 -16
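In short, every call site that previously hand-built newline-terminated JSON bytes now delegates to a single OpenaiStreamOutputer. A condensed before/after of the call-site change, reconstructed from the conversation_connector.py hunks below (finished_str comes from the surrounding stream_chat method):

    # Before: JSON formatting inlined at each call site
    yield (
        json.dumps({"content": finished_str, "content_type": "Finished"}) + "\n"
    ).encode("utf-8")

    # After: one shared formatter
    yield message_parser.outputer.output(
        content=finished_str, content_type="Finished"
    )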
conversations/conversation_connector.py
CHANGED

@@ -8,7 +8,7 @@ from networks import (
     ChathubRequestPayloadConstructor,
     ConversationRequestHeadersConstructor,
 )
-from networks import MessageParser,
+from networks import MessageParser, OpenaiStreamOutputer
 from utils.logger import logger
 
 http_proxy = "http://localhost:11111" # Replace with yours
@@ -80,7 +80,7 @@ class ConversationConnector:
     async def stream_chat(self, prompt="", yield_output=False):
         await self.connect()
         await self.send_chathub_request(prompt)
-        message_parser = MessageParser(outputer=
+        message_parser = MessageParser(outputer=OpenaiStreamOutputer())
         while not self.wss.closed:
             response_lines_str = await self.wss.receive_str()
             if isinstance(response_lines_str, str):
@@ -111,15 +111,9 @@
                     await self.wss.close()
                     await self.aiohttp_session.close()
                     if yield_output:
-                        yield (
-                            json.dumps(
-                                {
-                                    "content": finished_str,
-                                    "content_type": "Finished",
-                                }
-                            )
-                            + "\n"
-                        ).encode("utf-8")
+                        yield message_parser.outputer.output(
+                            content=finished_str, content_type="Finished"
+                        )
                     break
                 # Stream: Heartbeat Signal
                 elif data.get("type") == 6:
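For orientation, a hypothetical consumer of the refactored stream_chat(), assuming that with yield_output=True it is an async generator of UTF-8 encoded, newline-terminated JSON chunks shaped like the ones above; the connector setup itself is not part of this commit and is elided:

    import json

    async def consume(connector, prompt):
        # connector: an already-configured ConversationConnector (assumed)
        async for chunk in connector.stream_chat(prompt=prompt, yield_output=True):
            for line in chunk.decode("utf-8").splitlines():
                if not line.strip():
                    continue
                data = json.loads(line)
                if data.get("content_type") == "Finished":
                    return
                print(data.get("content"), end="", flush=True)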
networks/__init__.py
CHANGED

@@ -3,6 +3,6 @@ from .cookies_constructor import CookiesConstructor
 from .conversation_request_headers_constructor import (
     ConversationRequestHeadersConstructor,
 )
-from .message_outputer import
+from .message_outputer import OpenaiStreamOutputer
 from .message_parser import MessageParser
 from .stream_response_constructor import StreamResponseConstructor
networks/message_outputer.py
CHANGED

@@ -1,13 +1,8 @@
 import json
 
 
-class
-    def output(self, content=None, content_type=None):
-        return json.dumps({}).encode("utf-8")
-
-
-class ContentJSONOutputer:
-    def output(self, content=None, content_type=None):
+class OpenaiStreamOutputer:
+    def output(self, content=None, content_type=None) -> bytes:
         return (
             json.dumps(
                 {
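The diff is cut off before the end of the new output() body; a plausible completion, assuming it produces the same newline-terminated JSON that the removed inline formatters did (field names taken from the other hunks in this commit):

    import json


    class OpenaiStreamOutputer:
        def output(self, content=None, content_type=None) -> bytes:
            # One UTF-8 encoded, newline-terminated JSON object per chunk
            return (
                json.dumps(
                    {
                        "content": content,
                        "content_type": content_type,
                    }
                )
                + "\n"
            ).encode("utf-8")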
networks/message_parser.py
CHANGED

@@ -1,11 +1,11 @@
 import json
 
 from utils.logger import logger
-from networks import
+from networks import OpenaiStreamOutputer
 
 
 class MessageParser:
-    def __init__(self, outputer=
+    def __init__(self, outputer=OpenaiStreamOutputer()):
         self.delta_content_pointer = 0
         self.outputer = outputer
 
@@ -26,8 +26,11 @@
         # Message: Suggested Questions
         if message.get("suggestedResponses"):
             logger.note("\nSuggested Questions: ")
-
-
+            suggestion_texts = [
+                suggestion.get("text")
+                for suggestion in message.get("suggestedResponses")
+            ]
+            for suggestion_text in suggestion_texts:
                 logger.file(f"- {suggestion_text}")
         if return_output:
             output_bytes = self.outputer.output(
@@ -35,7 +38,7 @@
             )
         if message.get("suggestedResponses"):
             output_bytes += self.outputer.output(
-
+                suggestion_texts,
                 content_type="SuggestedResponses",
             )
         return output_bytes
@@ -78,14 +81,5 @@
                 f"Not Supported Message Type: {message_type}"
             )
 
-        return (
-            (
-                json.dumps(
-                    {
-                        "content": "",
-                        "content_type": "NotImplemented",
-                    }
-                )
-            )
-            + "\n"
-        ).encode("utf-8")
+        # return OpenaiStreamOutputer().output(content="", content_type="NotImplemented")
+        return b""
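With this change, suggested responses are collected into suggestion_texts and emitted as one extra chunk, while unsupported message types now return b"" and so contribute nothing to the stream. A hypothetical walk-through of the new suggested-responses path, using only the fields visible in the hunks above (the sample message content is made up):

    from networks import MessageParser  # assumes the project package is importable

    parser = MessageParser()  # outputer defaults to OpenaiStreamOutputer()

    message = {"suggestedResponses": [{"text": "Tell me more"}, {"text": "Why?"}]}

    suggestion_texts = [
        suggestion.get("text") for suggestion in message.get("suggestedResponses")
    ]
    output_bytes = parser.outputer.output(
        suggestion_texts,
        content_type="SuggestedResponses",
    )
    # output_bytes: newline-terminated JSON bytes, e.g.
    # {"content": ["Tell me more", "Why?"], "content_type": "SuggestedResponses"}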