import json
from http import HTTPStatus
import dashscope
from dashscope import Generation
from openai import OpenAI

# Shared system prompt for all models. English translation: "You are an assistant for a
# fake-news detection task. You need to judge whether the given news is true and generate
# an explanation of your judgment based on what you know."
SYSTEM_PROMPT = "您是虚假新闻检测任务的助理。你需要检测给定的新闻是否正确,并根据你所知道的情况生成你的判断解释。"


def qwen(query):
    client = OpenAI(
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    )
    completion = client.chat.completions.create(
        model="qwen-plus",  # model list: https://help.aliyun.com/zh/model-studio/getting-started/models
        messages=[
            {'role': 'system', 'content': SYSTEM_PROMPT},
            {'role': 'user', 'content': query},
        ],
    )
    result = json.loads(completion.model_dump_json())
    return result['choices'][0]['message']['content']

def llama(query):
    messages = [
        {'role': 'system', 'content': SYSTEM_PROMPT},
        {'role': 'user', 'content': query},
    ]
    response = dashscope.Generation.call(
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        model='llama3.3-70b-instruct',
        messages=messages,
        result_format='message',  # return the result in "message" format
    )
    if response.status_code == HTTPStatus.OK:
        return response['output']['choices'][0]['message']['content']
    else:
        return ('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
            response.request_id, response.status_code,
            response.code, response.message
        ))

def glm(query):
    messages = [
        {'role': 'system', 'content': SYSTEM_PROMPT},
        {'role': 'user', 'content': query},
    ]
    gen = Generation()
    response = gen.call(
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        model='chatglm-6b-v2',
        messages=messages,
        result_format='message',
    )
    result = response['output']['choices'][0]['message']['content']
    return result

def doubao(query):
    client = OpenAI(
        api_key="272b1003-3823-4723-834d-c004e9072e2f",
        base_url="https://ark.cn-beijing.volces.com/api/v3",
    )
    completion = client.chat.completions.create(
        model="ep-20250111205740-qcbs7",  # your model endpoint ID
        messages=[
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": query},
        ],
    )
    return completion.choices[0].message.content

def deepseek(query):
    client = OpenAI(api_key="sk-f138d39ff70c49409e69f30d2fc48d44", base_url="https://api.deepseek.com")
    response = client.chat.completions.create(
        model="deepseek-chat",
        messages=[
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": query},
        ],
        stream=False,
    )
    return response.choices[0].message.content

def baichuan(query):
    messages = [
        {'role': 'system', 'content': SYSTEM_PROMPT},
        {'role': 'user', 'content': query},
    ]
    response = dashscope.Generation.call(
        model='baichuan2-7b-chat-v1',
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        messages=messages,
        result_format='message',  # return the result in "message" format
    )
    if response.status_code == HTTPStatus.OK:
        return response['output']['choices'][0]['message']['content']
    else:
        return ('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
            response.request_id, response.status_code,
            response.code, response.message
        ))

# print(qwen("9月19日,马来西亚最高元首 Ibrahim 应邀对中国进行为期8天国事访问,亦是2024年1月上任以来首次访问东盟外国家。"))
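
# Minimal usage sketch, re-using the sample news item from the comment above. Each
# wrapper takes a news text string and returns the model's judgement plus its
# explanation as a single string; this assumes the hard-coded API keys are still valid
# and the corresponding endpoints are reachable.
if __name__ == "__main__":
    sample_news = "9月19日,马来西亚最高元首 Ibrahim 应邀对中国进行为期8天国事访问,亦是2024年1月上任以来首次访问东盟外国家。"
    for detect in (qwen, deepseek):
        print(detect.__name__, '->', detect(sample_news))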