monra committed on
Commit 26ae075
Parent: 52a6b09

Synced repo using 'sync_with_huggingface' Github Action

client/html/index.html CHANGED
@@ -85,7 +85,6 @@
 <div class="field">
   <select class="dropdown" name="model" id="model">
     <option value="gpt-3.5-turbo" selected>GPT-3.5</option>
-    <option value="gpt-3.5-turbo-0301">GPT-3.5-turbo-0301</option>
     <option value="gpt-3.5-turbo-16k">GPT-3.5-turbo-16k</option>
     <option value="gpt-4">GPT-4</option>
   </select>
g4f/Provider/Providers/Liaobots.py CHANGED
@@ -1,21 +1,37 @@
-import os, uuid, requests
+import os
+import uuid
+import requests
 from ...typing import sha256, Dict, get_type_hints
 
 url = 'https://liaobots.com'
-model = ['gpt-4-0613']
+model = ['gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-4']
 supports_stream = True
 needs_auth = True
+working = False
 
 models = {
-    'gpt-4-0613': {
-        "id":"gpt-4-0613",
-        "name":"GPT-4",
-        "maxLength":24000,
-        "tokenLimit":8000
-    }
+    'gpt-4': {
+        "id": "gpt-4",
+        "name": "GPT-4",
+        "maxLength": 24000,
+        "tokenLimit": 8000
+    },
+    'gpt-3.5-turbo': {
+        "id": "gpt-3.5-turbo",
+        "name": "GPT-3.5",
+        "maxLength": 12000,
+        "tokenLimit": 4000
+    },
+    'gpt-3.5-turbo-16k': {
+        "id": "gpt-3.5-turbo-16k",
+        "name": "GPT-3.5-16k",
+        "maxLength": 48000,
+        "tokenLimit": 16000
+    },
 }
 
-def _create_completion(model: str, messages: list, stream: bool, **kwargs):
+
+def _create_completion(model: str, messages: list, stream: bool, chatId: str, **kwargs):
 
     print(kwargs)
 
@@ -25,23 +41,24 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
         'origin': 'https://liaobots.com',
         'referer': 'https://liaobots.com/',
         'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
-        'x-auth-code': 'P6cPPK6Z8JDG3'
+        'x-auth-code': 'qlcUMVn1KLMhd'
     }
 
     json_data = {
-        'conversationId': str(uuid.uuid4()),
+        'conversationId': chatId,
         'model': models[model],
-        'authcode':"jrzVZMJiwN0NU",
         'messages': messages,
         'key': '',
         'prompt': "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
     }
 
-    response = requests.post('https://liaobots.com/api/chat',
+    response = requests.post('https://liaobots.com/api/chat',
                              headers=headers, json=json_data, stream=True)
 
     for token in response.iter_content(chunk_size=2046):
-        yield (token.decode('cp1251'))
+        yield (token.decode('utf-8'))
+
 
 params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
-    '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
+    '(%s)' % ', '.join(
+        [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
g4f/models.py CHANGED
@@ -31,7 +31,7 @@ class Model:
     class gpt_35_turbo_16k:
         name: str = 'gpt-3.5-turbo-16k'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Zeabur
+        best_provider: Provider.Provider = Provider.Liaobots
 
     class gpt_4_dev:
         name: str = 'gpt-4-for-dev'
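With this change, Model.gpt_35_turbo_16k.best_provider resolves to Provider.Liaobots instead of Provider.Zeabur. A minimal sketch of routing a request through that class attribute; the import paths are assumptions based on the repo layout above, and the manual dispatch shown is illustrative rather than the repo's own entry point:

import uuid

from g4f import Provider, models

# Look up the model's metadata and its (newly remapped) best provider.
model_cls = models.Model.gpt_35_turbo_16k
provider = model_cls.best_provider            # Provider.Liaobots after this commit

assert provider is Provider.Liaobots

# Hypothetical manual dispatch straight through the provider module.
completion = provider._create_completion(
    model=model_cls.name,                      # 'gpt-3.5-turbo-16k', a key in Liaobots' models dict
    messages=[{"role": "user", "content": "Hi"}],
    stream=True,
    chatId=str(uuid.uuid4()),                  # required by the updated Liaobots signature
)
print(''.join(completion))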