Hansimov committed
Commit: 9d28f7e
Parent(s): babcd78

:gem: [Feature] Support multiple messages for gpt-3.5 model, and refactor proxies

Files changed (1):
  1. tests/openai.py  +44 -35
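
Note on the proxy refactor: the diff below replaces the per-instance requests_proxies dict (built here from secrets.json via OSEnver) with a shared PROXIES constant imported from constants.env. That module is not part of this commit, so the following is only a sketch of what it might contain, reusing the ENVER / http_proxy logic that this commit removes from tests/openai.py; the file path and variable names are carried over from the removed code, not confirmed for constants/env.py.

    # constants/env.py -- hypothetical sketch, not shown in this commit.
    # Mirrors the ENVER-based proxy setup that the diff below removes,
    # so that `from constants.env import PROXIES` has something to resolve.
    from pathlib import Path

    from tclogger import OSEnver

    secrets_path = Path(__file__).parents[1] / "secrets.json"
    ENVER = OSEnver(secrets_path)

    http_proxy = ENVER["http_proxy"]
    if http_proxy:
        # requests/curl_cffi-style proxies mapping
        PROXIES = {"http": http_proxy, "https": http_proxy}
    else:
        PROXIES = None
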
tests/openai.py CHANGED
@@ -7,9 +7,7 @@ from pathlib import Path
 
 from curl_cffi import requests
 from tclogger import logger, OSEnver
-
-secrets_path = Path(__file__).parents[1] / "secrets.json"
-ENVER = OSEnver(secrets_path)
+from constants.env import PROXIES
 
 
 class OpenaiAPI:
@@ -42,17 +40,6 @@ class OpenaiAPI:
             "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
         }
 
-        http_proxy = ENVER["http_proxy"]
-        if http_proxy:
-            self.requests_proxies = {
-                "http": http_proxy,
-                "https": http_proxy,
-            }
-            logger.note(f"> Using Proxy:", end=" ")
-            logger.mesg(f"{ENVER['http_proxy']}")
-        else:
-            self.requests_proxies = None
-
     def log_request(self, url, method="GET"):
         logger.note(f"> {method}:", end=" ")
         logger.mesg(f"{url}", end=" ")
@@ -83,13 +70,16 @@ class OpenaiAPI:
             if line:
                 try:
                     data = json.loads(line, strict=False)
-                    role = data["message"]["author"]["role"]
-                    if role != "assistant":
-                        continue
-                    content = data["message"]["content"]["parts"][0]
-                    delta_content = content[self.content_offset :]
-                    self.content_offset = len(content)
-                    logger_func(delta_content, end="")
+                    message_role = data["message"]["author"]["role"]
+                    message_status = data["message"]["status"]
+                    if (
+                        message_role == "assistant"
+                        and message_status == "in_progress"
+                    ):
+                        content = data["message"]["content"]["parts"][0]
+                        delta_content = content[self.content_offset :]
+                        self.content_offset = len(content)
+                        logger_func(delta_content, end="")
                 except Exception as e:
                     logger.warn(e)
                 else:
@@ -100,7 +90,7 @@ class OpenaiAPI:
         res = requests.get(
             self.api_models,
             headers=self.requests_headers,
-            proxies=self.requests_proxies,
+            proxies=PROXIES,
             timeout=10,
             impersonate="chrome120",
         )
@@ -111,14 +101,31 @@ class OpenaiAPI:
         res = requests.post(
             self.api_chat_requirements,
             headers=self.requests_headers,
-            proxies=self.requests_proxies,
+            proxies=PROXIES,
             timeout=10,
             impersonate="chrome120",
         )
         self.chat_requirements_token = res.json()["token"]
         self.log_response(res)
 
-    def chat_completions(self, prompt: str):
+    def transform_messages(self, messages: list[dict]):
+        def get_role(role):
+            if role in ["system", "user", "assistant"]:
+                return role
+            else:
+                return "system"
+
+        new_messages = [
+            {
+                "author": {"role": get_role(message["role"])},
+                "content": {"content_type": "text", "parts": [message["content"]]},
+                "metadata": {},
+            }
+            for message in messages
+        ]
+        return new_messages
+
+    def chat_completions(self, messages: list[dict]):
         new_headers = {
             "Accept": "text/event-stream",
             "Openai-Sentinel-Chat-Requirements-Token": self.chat_requirements_token,
@@ -127,14 +134,7 @@ class OpenaiAPI:
         requests_headers.update(new_headers)
         post_data = {
            "action": "next",
-            "messages": [
-                {
-                    "id": self.uuid,
-                    "author": {"role": "user"},
-                    "content": {"content_type": "text", "parts": [prompt]},
-                    "metadata": {},
-                }
-            ],
+            "messages": self.transform_messages(messages),
             "parent_message_id": "",
             "model": "text-davinci-002-render-sha",
             "timezone_offset_min": -480,
@@ -153,7 +153,7 @@ class OpenaiAPI:
             self.api_conversation,
             headers=requests_headers,
             json=post_data,
-            proxies=self.requests_proxies,
+            proxies=PROXIES,
             timeout=10,
             impersonate="chrome120",
             stream=True,
@@ -165,7 +165,16 @@ if __name__ == "__main__":
     api = OpenaiAPI()
     # api.get_models()
     api.auth()
-    prompt = "你的名字?"
-    api.chat_completions(prompt)
+    messages = [
+        {"role": "system", "content": "i am Hansimov"},
+        {"role": "system", "content": "i have a cat named lucky"},
+        {"role": "user", "content": "Repeat my name and my cat's name"},
+        {
+            "role": "assistant",
+            "content": "Your name is Hansimov and your cat's name is Lucky.",
+        },
+        {"role": "user", "content": "summarize our conversation"},
+    ]
+    api.chat_completions(messages)
 
     # python -m tests.openai
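
For reference, a standalone sketch of the new message handling: chat_completions now accepts OpenAI-style {"role", "content"} dicts, and transform_messages maps each one to the ChatGPT backend shape (author / content.parts), coercing any unrecognized role to "system". The snippet below replicates that mapping outside the class so the resulting payload can be inspected directly; it is an illustration, not part of the commit.

    # Standalone replica of the new transform_messages logic (illustration only).
    def transform_messages(messages: list[dict]) -> list[dict]:
        def get_role(role: str) -> str:
            # roles outside the known set are coerced to "system"
            return role if role in ["system", "user", "assistant"] else "system"

        return [
            {
                "author": {"role": get_role(message["role"])},
                "content": {"content_type": "text", "parts": [message["content"]]},
                "metadata": {},
            }
            for message in messages
        ]


    if __name__ == "__main__":
        messages = [
            {"role": "system", "content": "i am Hansimov"},
            {"role": "user", "content": "Repeat my name"},
        ]
        # Prints the "messages" value that chat_completions now sends in post_data
        print(transform_messages(messages))

The streaming handler is also tightened in this commit: deltas are emitted only while the assistant message has status "in_progress", with content_offset slicing the cumulative parts[0] text into incremental chunks.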