New API provider: Fakeopen
Files changed:
- client/html/index.html +2 -2
- g4f/Provider/Provider.py +1 -0
- g4f/Provider/Providers/Fakeopen.py +54 -0
- g4f/Provider/__init__.py +17 -16
- g4f/models.py +6 -6
client/html/index.html
CHANGED
@@ -73,8 +73,8 @@
                     <select class="dropdown" name="model" id="model">
                         <option value="gpt-3.5-turbo">GPT-3.5</option>
                         <option value="gpt-3.5-turbo-0613">GPT-3.5-0613</option>
-                        <option value="gpt-3.5-turbo-16k">GPT-3.5-
-                        <option value="gpt-3.5-turbo-16k-0613" selected>GPT-3.5-
+                        <option value="gpt-3.5-turbo-16k">GPT-3.5-turbo-16k</option>
+                        <option value="gpt-3.5-turbo-16k-0613" selected>GPT-3.5-turbo-16k-0613</option>
                         <option value="gpt-4" disabled>GPT-4 (maintenance)</option>
                     </select>
                 </div>
g4f/Provider/Provider.py
CHANGED
@@ -6,6 +6,7 @@ model = None
 supports_stream = False
 needs_auth = False
 
+
 def _create_completion(model: str, messages: list, stream: bool, **kwargs):
     return
 
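For context, the base module patched above only declares the interface that each backend fills in: module-level flags plus a _create_completion stub. Below is a minimal illustrative sketch of that contract (not part of this commit; the endpoint and echo behaviour are placeholders), modeled on the Fakeopen file added next.

# Illustrative provider-module skeleton following the conventions used in
# g4f/Provider/Providers/*: metadata at module level, plus a generator that
# yields response chunks. Endpoint and behaviour here are placeholders.
import os
from typing import get_type_hints

url = 'https://example.invalid/v1/'   # hypothetical endpoint, not a real provider
model = ['gpt-3.5-turbo']
supports_stream = True
needs_auth = False


def _create_completion(model: str, messages: list, stream: bool, **kwargs):
    # A real provider performs an HTTP call here; this stub just echoes the prompt.
    yield f"echo: {messages[-1]['content']}"


params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + '(%s)' % ', '.join(
    [f"{name}: {get_type_hints(_create_completion)[name].__name__}"
     for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])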
g4f/Provider/Providers/Fakeopen.py
ADDED
@@ -0,0 +1,54 @@
+import os
+import json
+import requests
+from typing import Dict, get_type_hints
+
+url = 'https://ai.fakeopen.com/v1/'
+model = [
+    'gpt-3.5-turbo',
+    'gpt-3.5-turbo-0613',
+    'gpt-3.5-turbo-16k',
+    'gpt-3.5-turbo-16k-0613',
+]
+
+supports_stream = True
+needs_auth = False
+
+
+def _create_completion(model: str, messages: list, stream: bool, **kwargs):
+
+    headers = {
+        'Content-Type': 'application/json',
+        'accept': 'text/event-stream',
+        'Cache-Control': 'no-cache',
+        'Proxy-Connection': 'keep-alive',
+        'Authorization': f"Bearer {os.environ.get('FAKE_OPEN_KEY', 'pk-this-is-a-real-free-api-key-pk-for-everyone')}",
+    }
+
+    json_data = {
+        'messages': messages,
+        'temperature': 1.0,
+        'model': model,
+        'stream': stream,
+    }
+
+    response = requests.post(
+        'https://ai.fakeopen.com/v1/chat/completions', headers=headers, json=json_data, stream=True
+    )
+
+    for token in response.iter_lines():
+        decoded = token.decode('utf-8')
+        if decoded == '[DONE]':
+            break
+        if decoded.startswith('data: '):
+            data_str = decoded.replace('data: ', '')
+            if data_str != '[DONE]':
+                data = json.loads(data_str)
+                if 'choices' in data and 'delta' in data['choices'][0] and 'content' in data['choices'][0]['delta']:
+                    yield data['choices'][0]['delta']['content']
+
+
+
+
+params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + '(%s)' % ', '.join(
+    [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
g4f/Provider/__init__.py
CHANGED
@@ -1,28 +1,29 @@
 from . import Provider
 from .Providers import (
-    Ezcht,
-    You,
-    Bing,
-    Yqcloud,
-    Theb,
     Aichat,
     Bard,
-    Vercel,
-    Forefront,
-    Lockchat,
-    Liaobots,
-    H2o,
+    Bing,
+    ChatgptAi,
+    ChatgptLogin,
     ChatgptLogin,
     DeepAi,
+    Easychat,
+    Ezcht,
+    Fakeopen,
+    Forefront,
     GetGpt,
-    Mishalsgpt,
     Gravityengine,
-    ChatgptAi,
-    ChatgptLogin,
-    Phind,
-    Easychat,
+    H2o,
     hteyun,
-    Weuseing,
+    Liaobots,
+    Lockchat,
+    Mishalsgpt,
+    Phind,
+    Theb,
+    Vercel,
+    Weuseing,
+    Yqcloud,
+    You,
 )
 
 Palm = Bard
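With the re-export list alphabetized (note that ChatgptLogin now appears twice, which Python tolerates but a follow-up could deduplicate), every backend, including the new one, is reachable through the g4f.Provider namespace. A small sketch, assuming other provider modules expose the same metadata fields as Fakeopen does in this commit:

# Sketch: inspect a few of the re-exported provider modules.
from g4f import Provider

for name in ('Fakeopen', 'DeepAi', 'GetGpt'):  # arbitrary sample of the exports above
    mod = getattr(Provider, name)
    print(name,
          '| streams:', getattr(mod, 'supports_stream', None),
          '| needs auth:', getattr(mod, 'needs_auth', None))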
g4f/models.py
CHANGED
@@ -10,24 +10,24 @@ class Model:
     class gpt_35_turbo:
         name: str = 'gpt-3.5-turbo'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.
-        best_providers: list = [Provider.
+        best_provider: Provider.Provider = Provider.Fakeopen
+        best_providers: list = [Provider.Fakeopen, Provider.Lockchat, Provider.Yqcloud, Provider.Forefront,]
 
     class gpt_35_turbo_0613:
         name: str = 'gpt-3.5-turbo-0613'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.
+        best_provider: Provider.Provider = Provider.Fakeopen
 
     class gpt_35_turbo_16k_0613:
         name: str = 'gpt-3.5-turbo-16k-0613'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.
-        best_providers: list = [Provider.Easychat, Provider.Ezcht]
+        best_provider: Provider.Provider = Provider.Fakeopen
+        best_providers: list = [Provider.Easychat, Provider.Ezcht, Provider.Fakeopen]
 
     class gpt_35_turbo_16k:
         name: str = 'gpt-3.5-turbo-16k'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.
+        best_provider: Provider.Provider = Provider.Fakeopen
 
     class gpt_4_dev:
         name: str = 'gpt-4-for-dev'
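The model table above is what callers consult to pick a backend. A minimal sketch of resolving and using it, assuming the attribute names shown in this diff (g4f's own entry points may consume the table differently):

# Sketch: look up a model's preferred provider from g4f/models.py and stream from it.
from g4f import models

entry = models.Model.gpt_35_turbo
provider = entry.best_provider                    # Provider.Fakeopen after this change
fallbacks = getattr(entry, 'best_providers', [])  # optional alternates, where defined

prompt = [{'role': 'user', 'content': 'ping'}]
for chunk in provider._create_completion(entry.name, prompt, stream=True):
    print(chunk, end='')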