monra committed on
Commit ef98cb8
2 Parent(s): 7e5305c b0234d9

Merge branch 'main' of https://github.com/ramonvc/freegpt-webui

client/html/index.html CHANGED
@@ -74,8 +74,8 @@
     <option value="gpt-3.5-turbo">GPT-3.5</option>
     <option value="gpt-3.5-turbo-0613">GPT-3.5-0613</option>
     <option value="gpt-3.5-turbo-16k">GPT-3.5-turbo-16k</option>
-    <option value="gpt-3.5-turbo-16k-0613">GPT-3.5-turbo-16k-0613</option>
-    <option value="gpt-4-0613" selected>GPT-4</option>
+    <option value="gpt-3.5-turbo-16k-0613" selected>GPT-3.5-turbo-16k-0613</option>
+    <option value="gpt-4-0613">GPT-4 (unstable)</option>
     </select>
     </div>
     <div class="field">
g4f/Provider/Providers/Xiaor.py ADDED
@@ -0,0 +1,41 @@
+import requests
+import os
+import json
+from ...typing import sha256, Dict, get_type_hints
+
+url = 'https://xiaor.eu.org'
+model = ['gpt-3.5-turbo', 'gpt-3.5-turbo-16k',
+         'gpt-3.5-turbo-16k-0613', 'gpt-3.5-turbo-0613']
+supports_stream = True
+needs_auth = False
+
+
+def _create_completion(model: str, messages: list, stream: bool, temperature: float = 0.7, **kwargs):
+    headers = {
+        'Content-Type': 'application/json',
+    }
+    data = {
+        'model': model,
+        'temperature': temperature,
+        'presence_penalty': 0,
+        'messages': messages,
+    }
+    # Send the JSON headers along with the request body.
+    response = requests.post(url + '/p1/v1/chat/completions',
+                             headers=headers, json=data, stream=True)
+
+    if stream:
+        # Each streamed chunk is expected to be a complete JSON payload.
+        for chunk in response.iter_content(chunk_size=None):
+            chunk = chunk.decode('utf-8')
+            if chunk.strip():
+                message = json.loads(chunk)['choices'][0]['message']['content']
+                yield message
+    else:
+        message = response.json()['choices'][0]['message']['content']
+        yield message
+
+
+params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
+    '(%s)' % ', '.join(
+        [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
g4f/Provider/__init__.py CHANGED
@@ -24,6 +24,7 @@ from .Providers import (
     Theb,
     Vercel,
     Weuseing,
+    Xiaor,
     Yqcloud,
     You,
 )
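Note: this one-line registration is what exposes the module at package level; a quick sketch of what the added import makes available (attribute values taken from the new Xiaor.py above):

from g4f import Provider

# The new module is now reachable as Provider.Xiaor, like the other providers.
print(Provider.Xiaor.url)              # 'https://xiaor.eu.org'
print(Provider.Xiaor.supports_stream)  # True
print(Provider.Xiaor.needs_auth)       # False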
g4f/models.py CHANGED
@@ -1,5 +1,5 @@
 from g4f import Provider
-
+import random
 
 class Model:
     class model:
@@ -10,24 +10,22 @@ class Model:
     class gpt_35_turbo:
         name: str = 'gpt-3.5-turbo'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Better
-        best_providers: list = [Provider.Better, Provider.Lockchat, Provider.Yqcloud, Provider.Forefront,]
+        best_provider: Provider.Provider = random.choice([Provider.DeepAi, Provider.Easychat])
 
     class gpt_35_turbo_0613:
         name: str = 'gpt-3.5-turbo-0613'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Better
+        best_provider: Provider.Provider = random.choice([Provider.Easychat])
 
     class gpt_35_turbo_16k_0613:
         name: str = 'gpt-3.5-turbo-16k-0613'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Gravityengine
-        best_providers: list = [Provider.Easychat, Provider.Ezcht, Provider.Better]
+        best_provider: Provider.Provider = random.choice([Provider.Easychat])
 
     class gpt_35_turbo_16k:
         name: str = 'gpt-3.5-turbo-16k'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Better
+        best_provider: Provider.Provider = random.choice([Provider.Easychat])
 
     class gpt_4_dev:
         name: str = 'gpt-4-for-dev'
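Note: each random.choice(...) call above runs once, at the moment g4f/models.py is imported, so best_provider is fixed for the lifetime of the process rather than re-drawn per request. A self-contained sketch of that behaviour (toy class and provider names, no g4f imports):

import random

# Class attributes are evaluated at class-definition time, i.e. when the
# defining module is imported.
class gpt_35_turbo_demo:
    best_provider = random.choice(['DeepAi', 'Easychat'])

print(gpt_35_turbo_demo.best_provider)
print(gpt_35_turbo_demo.best_provider)  # same value: no new draw per lookup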