import json
from http import HTTPStatus

import dashscope
from dashscope import Generation
from openai import OpenAI

# Shared system prompt (Chinese, kept verbatim). English translation:
# "You are an assistant for a fake-news detection task. You need to determine
# whether the given news item is true and generate an explanation of your
# judgement based on what you know."
SYSTEM_PROMPT = "您是虚假新闻检测任务的助理。你需要检测给定的新闻是否正确,并根据你所知道的情况生成你的判断解释。"


def qwen(query):
    """Query qwen-plus through DashScope's OpenAI-compatible endpoint."""
    client = OpenAI(
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    )
    completion = client.chat.completions.create(
        model="qwen-plus",  # model list: https://help.aliyun.com/zh/model-studio/getting-started/models
        messages=[
            {'role': 'system', 'content': SYSTEM_PROMPT},
            {'role': 'user', 'content': query},
        ],
    )
    result = json.loads(completion.model_dump_json())
    return result['choices'][0]['message']['content']


def llama(query):
    """Query llama3.3-70b-instruct through the DashScope Generation API."""
    messages = [
        {'role': 'system', 'content': SYSTEM_PROMPT},
        {'role': 'user', 'content': query},
    ]
    response = dashscope.Generation.call(
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        model='llama3.3-70b-instruct',
        messages=messages,
        result_format='message',  # return the result in "message" format
    )
    if response.status_code == HTTPStatus.OK:
        return response['output']['choices'][0]['message']['content']
    else:
        return ('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
            response.request_id, response.status_code, response.code, response.message
        ))


def glm(query):
    """Query chatglm-6b-v2 through the DashScope Generation API."""
    messages = [
        {'role': 'system', 'content': SYSTEM_PROMPT},
        {'role': 'user', 'content': query},
    ]
    gen = Generation()
    response = gen.call(
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        model='chatglm-6b-v2',
        messages=messages,
        result_format='message',
    )
    return response['output']['choices'][0]['message']['content']


def doubao(query):
    """Query a Doubao endpoint through the Volcengine Ark OpenAI-compatible API."""
    client = OpenAI(
        api_key="272b1003-3823-4723-834d-c004e9072e2f",
        base_url="https://ark.cn-beijing.volces.com/api/v3",
    )
    completion = client.chat.completions.create(
        model="ep-20250111205740-qcbs7",  # your model endpoint ID
        messages=[
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": query},
        ],
    )
    return completion.choices[0].message.content


def deepseek(query):
    """Query deepseek-chat through the DeepSeek OpenAI-compatible API."""
    client = OpenAI(
        api_key="sk-f138d39ff70c49409e69f30d2fc48d44",
        base_url="https://api.deepseek.com",
    )
    response = client.chat.completions.create(
        model="deepseek-chat",
        messages=[
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": query},
        ],
        stream=False,
    )
    return response.choices[0].message.content


def baichuan(query):
    """Query baichuan2-7b-chat-v1 through the DashScope Generation API."""
    messages = [
        {'role': 'system', 'content': SYSTEM_PROMPT},
        {'role': 'user', 'content': query},
    ]
    response = dashscope.Generation.call(
        model='baichuan2-7b-chat-v1',
        api_key="sk-39b39862ebfb4735aae411cdaa4b99dd",
        messages=messages,
        result_format='message',  # return the result in "message" format
    )
    if response.status_code == HTTPStatus.OK:
        return response['output']['choices'][0]['message']['content']
    else:
        return ('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
            response.request_id, response.status_code, response.code, response.message
        ))


# Example claim (English translation: "On September 19, Malaysia's King Ibrahim began
# an invited 8-day state visit to China, also his first visit to a non-ASEAN country
# since taking office in January 2024.")
# print(qwen("9月19日,马来西亚最高元首 Ibrahim 应邀对中国进行为期8天国事访问,亦是2024年1月上任以来首次访问东盟外国家。"))
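# Minimal usage sketch, added for illustration (not part of the original wrappers
# above): pick one wrapper, pass it a plain-text news claim, and print the model's
# verdict plus explanation. The claim below is an English rendering of the Chinese
# example commented out above; the API keys and model names are assumed valid as
# configured in this file.
if __name__ == "__main__":
    claim = ("On September 19, Malaysia's King Ibrahim began an invited 8-day state "
             "visit to China, also his first visit to a non-ASEAN country since "
             "taking office in January 2024.")
    # Every wrapper (qwen, llama, glm, doubao, deepseek, baichuan) has the same
    # str -> str signature, so any of them can be swapped in here.
    print(qwen(claim))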