"""
update time: 2025.01.06
"""
import json

import requests


def get_models():
    """Return the supported model list as a JSON string (OpenAI-style /models payload)."""
    models = {
        "object": "list",
        "data": [
            {"id": "Qwen2.5-72B", "object": "model", "created": 0, "owned_by": "Qwen"},
            {"id": "Llama3.3-70B", "object": "model", "created": 0, "owned_by": "Nemotron"},
            {"id": "Pixtral-124B", "object": "model", "created": 0, "owned_by": "Pixtral"},
            {"id": "Qwen2.5-Coder-32B", "object": "model", "created": 0, "owned_by": "Qwen"},
        ]
    }
    return json.dumps(models)


def is_model_available(model_id):
    """Check whether model_id appears in the supported model list."""
    models_data = json.loads(get_models())
    for model in models_data.get("data", []):
        if model["id"] == model_id:
            return True
    return False


def get_auto_model(model_id=None):
    """Return model_id if it is a supported model; otherwise fall back to the default.

    The original version ignored model_id and always returned the default,
    which silently discarded the caller's model choice.
    """
    if model_id and is_model_available(model_id):
        return model_id
    return "Llama3.3-70B"


def get_model_by_autoupdate(model_id=None):
    """Resolve model_id against the current model list, falling back to the default."""
    models_data = json.loads(get_models())["data"]
    valid_ids = [model["id"] for model in models_data]
    if model_id not in valid_ids:
        model_id = "Llama3.3-70B"
    model_data = next((model for model in models_data if model["id"] == model_id), None)
    return model_data["id"] if model_data else None
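
# Illustrative behavior of the resolver above (values assume the model list
# defined in get_models() as of this file's update date):
#   get_model_by_autoupdate("Qwen2.5-72B")  -> "Qwen2.5-72B"   (known id is kept)
#   get_model_by_autoupdate("gpt-4o")       -> "Llama3.3-70B"  (unknown id falls back)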


def chat_completion_message(
        user_prompt,
        user_id: str = None,
        session_id: str = None,
        system_prompt="You are a helpful assistant.",
        model="Llama3.3-70B",
        project="DecentralGPT", stream=False,
        temperature=0.3, max_tokens=1024, top_p=0.5,
        frequency_penalty=0, presence_penalty=0):
    """Send a single-turn prompt. Session isolation (one user, one session) will be added in the future."""
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt}
    ]
    # Pass by keyword: the positional order here differs from the signature of
    # chat_completion_messages (model comes before user_id there), so passing
    # these positionally would bind user_id to the model parameter.
    return chat_completion_messages(
        messages, model=model, user_id=user_id, session_id=session_id,
        project=project, stream=stream, temperature=temperature,
        max_tokens=max_tokens, top_p=top_p,
        frequency_penalty=frequency_penalty, presence_penalty=presence_penalty)


def chat_completion_messages(
        messages,
        model="Llama3.3-70B",
        user_id: str = None,
        session_id: str = None,
        project="DecentralGPT", stream=False, temperature=0.3, max_tokens=1024, top_p=0.5,
        frequency_penalty=0, presence_penalty=0):
    """Build the request payload and forward it to the DecentralGPT chat proxy.

    user_id and session_id are reserved for the planned session isolation and
    are not sent to the API yet.
    """
    url = 'https://usa-chat.degpt.ai/api/v0/chat/completion/proxy'
    headers = {
        'sec-ch-ua-platform': '"macOS"',
        'Referer': 'https://www.degpt.ai/',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
        'sec-ch-ua': '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
        'DNT': '1',
        'Content-Type': 'application/json',
        'sec-ch-ua-mobile': '?0'
    }
    payload = {
        # Resolve the requested model, falling back to the default if unknown.
        "model": get_auto_model(model),
        "messages": messages,
        "project": project,
        "stream": stream,
        "temperature": temperature,
        "max_tokens": max_tokens,
        "top_p": top_p,
        "frequency_penalty": frequency_penalty,
        "presence_penalty": presence_penalty
    }
    return chat_completion(url, headers, payload)


def chat_completion(url, headers, payload):
    """POST the payload and return the parsed JSON response."""
    try:
        response = requests.post(url, headers=headers, json=payload)
        response.encoding = 'utf-8'
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Request failed: {e}")
        return "Request failed; please check the network or parameter configuration."
    except ValueError as e:
        # response.json() raises ValueError (JSONDecodeError) on a non-JSON body;
        # the original caught KeyError/IndexError, which nothing here can raise.
        print(f"Error parsing response: {e}")
        return "Failed to parse the response content."


def is_chatgpt_format(data):
    """Check if the data is in the expected ChatGPT format."""
    try:
        if isinstance(data, str):
            try:
                data = json.loads(data)
            except json.JSONDecodeError:
                return False
        if isinstance(data, dict):
            if "choices" in data and isinstance(data["choices"], list) and len(data["choices"]) > 0:
                if "message" in data["choices"][0]:
                    return True
    except Exception as e:
        print(f"Error checking ChatGPT format: {e}")
    return False
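

# A minimal usage sketch (an assumption for illustration, not part of the
# original module): run this file as a script to send one prompt and print the
# reply. The proxy endpoint above must be reachable for this to succeed.
if __name__ == "__main__":
    reply = chat_completion_message("Briefly introduce yourself.", model="Qwen2.5-72B")
    if is_chatgpt_format(reply):
        # Standard OpenAI-style shape; "content" inside the message is assumed.
        print(reply["choices"][0]["message"].get("content"))
    else:
        # On failure chat_completion returns an error string instead of a dict.
        print(reply)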