import requests
from ollama import Client

# Target URL of the local chat service.
url = 'http://localhost:8000/chat'

# JSON payload for the POST request.
# NOTE(review): 'key2': 'value2' looks like leftover placeholder data —
# confirm whether the /chat endpoint actually expects it.
data = {
    'query': '最近的节日是什么？',
    'key2': 'value2'
}

try:
    # Send the POST request. The timeout keeps the script from hanging
    # forever when the server is unreachable or slow (requests has no
    # default timeout).
    response = requests.post(url, json=data, timeout=30)
except requests.RequestException as e:
    # Covers connection errors, timeouts, and other transport failures
    # that would otherwise surface as an unhandled traceback.
    print(f'Request failed: {e}')
else:
    # Check whether the request succeeded.
    if response.status_code == 200:
        # Print the returned JSON content.
        print('Response:', response.json())
    else:
        # Print the error details.
        print(f'Error: {response.status_code}')
        print(response.text)






# # 初始化Ollama客户端
# ollama_client = Client("http://127.0.0.1:11434")

# try:
#     # 调用Ollama API
#     response = ollama_client.chat(
#         model='qwen2.5:14b-instruct-q3_K_S',
#         messages=[
#             {
#                 'role': 'system', 
#                 'content': '你是一个基于网络搜索结果的智能助手。请仔细分析提供的搜索信息，并给出准确的回答。'
#             },
#             {
#                 'role': 'user', 
#                 'content': "介绍一下你自己"
#             }
#         ]
#     )
    
#     # 打印完整的响应内容
#     print("完整响应：", response)
    
#     # 提取并打印模型生成的回复
#     if 'message' in response and 'content' in response['message']:
#         print("\n模型回复：", response['message']['content'])
#     else:
#         print("响应中未找到有效回复内容")

# except Exception as e:
#     print(f"调用Ollama API时发生错误：{str(e)}")                