:recycle: [Refactor] Prettify initialization of OpenaiAPI requests params
tests/openai.py (+21 -16)
@@ -11,24 +11,19 @@ ENVER = OSEnver(secrets_path)
 
 class OpenaiAPI:
     def __init__(self):
-        self.
-        self.api_models = "https://chat.openai.com/backend-anon/models"
+        self.init_requests_params()
 
-    def
-
-
-
-
-            "https": http_proxy,
-        }
-        uuid_str = str(uuid.uuid4())
-        requests_headers = {
+    def init_requests_params(self):
+        self.api_base = "https://chat.openai.com/backend-anon"
+        self.api_me = f"{self.api_base}/me"
+        self.api_models = f"{self.api_base}/models"
+        self.api_chat_requirements = f"{self.api_base}/sentinel/chat-requirements"
+        self.requests_headers = {
             "Accept": "*/*",
             "Accept-Encoding": "gzip, deflate, br, zstd",
             "Accept-Language": "en-US,en;q=0.9",
             "Cache-Control": "no-cache",
-            "Oai-Device-Id": uuid_str,
+            "Oai-Device-Id": str(uuid.uuid4()),
             "Oai-Language": "en-US",
             "Pragma": "no-cache",
             "Referer": "https://chat.openai.com/",
@@ -41,12 +36,22 @@ class OpenaiAPI:
             "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
         }
 
+        http_proxy = ENVER["http_proxy"]
+        self.requests_proxies = {
+            "http": http_proxy,
+            "https": http_proxy,
+        }
+
+    def get_models(self):
+        if ENVER["http_proxy"]:
+            logger.note(f"> Using Proxy: {ENVER['http_proxy']}")
+
         logger.note(f"> Get: {self.api_models}")
 
         res = requests.get(
             self.api_models,
-            headers=requests_headers,
-            proxies=requests_proxies,
+            headers=self.requests_headers,
+            proxies=self.requests_proxies,
             timeout=10,
             impersonate="chrome120",
         )
@@ -57,6 +62,6 @@ class OpenaiAPI:
 
 if __name__ == "__main__":
     api = OpenaiAPI()
-    api.
+    api.get_models()
 
 # python -m tests.openai
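The point of the refactor is that the endpoint URLs, request headers, and proxies now live on the instance, set once in init_requests_params(), so any new endpoint method can reuse them instead of rebuilding them inline. As a rough sketch only (the get_me method below is hypothetical and not part of this commit; it assumes the module's existing requests import, evidently a curl_cffi-style client given the impersonate argument, plus the module-level logger and the self attributes shown in the diff), a follow-up method against the already-defined self.api_me URL might look like:

    # Hypothetical sketch, not in this commit: a new endpoint method that
    # slots into the OpenaiAPI class and reuses the shared request params
    # set by init_requests_params().
    def get_me(self):
        logger.note(f"> Get: {self.api_me}")
        res = requests.get(
            self.api_me,
            headers=self.requests_headers,
            proxies=self.requests_proxies,
            timeout=10,
            impersonate="chrome120",
        )
        return res.json()

Because the headers and proxies are instance attributes, each such method stays a few lines long, and a change such as how the Oai-Device-Id header is generated is made in one place.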