from config import *

# Rolling conversation history shared by chating_with_ollama() across calls;
# seeded with the system prompt (imported via `from config import *` above).
messages = [{"role": "system", "content": system_prompt}]


def should_use_search(query):
    """Return True when the query looks time-sensitive and needs a web search.

    :param query: raw user input text
    :return: True if any trigger keyword occurs in the (lower-cased) query
    """
    triggers = ("今天", "现在", "最新", "天气", "新闻", "搜索", "查一下")
    lowered = query.lower()
    for trigger in triggers:
        if trigger in lowered:
            return True
    return False


def chating_with_ollama(user_input):
    """Chat with the locally deployed Ollama model, keeping multi-turn history.

    The module-level ``messages`` list serves as the rolling conversation
    history, so successive calls continue the same chat.  When the input
    looks time-sensitive the search tool is offered to the model; if the
    model requests it, the search runs and the model is asked again with
    the results injected into the history.

    :param user_input: the user's message text
    :return: the model's reply, or a canned apology string on any failure
    """
    # Record the user turn first; it is rolled back in the except branch
    # so a failed request does not poison the shared history.
    messages.append({"role": "user", "content": user_input})

    try:
        # Only advertise the search tool when the query looks time-sensitive,
        # keeping ordinary chit-chat requests cheaper.
        chat_options = {
            'model': 'qwen2.5:7b',
            'messages': messages,
        }
        if should_use_search(user_input):
            chat_options['tools'] = search_tool

        response = client.chat(**chat_options)

        # If the model asked to call our search tool, run it and feed the
        # results back for a second, grounded completion.
        if 'tool_calls' in response['message']:
            tool_call = response['message']['tool_calls'][0]
            if tool_call['function']['name'] == 'get_web_search':
                search_query = tool_call['function']['arguments']['query']
                search_result = get_web_search(search_query)
                # Inject the search results into the history, then re-ask.
                messages.append({"role": "system", "content": f"整理如下搜索结果返回：\n{search_result}"})
                response = client.chat(
                    model="qwen2.5:7b",
                    messages=messages
                )

        full_response = response['message']['content']
        messages.append({"role": "assistant", "content": full_response})
        return full_response

    except Exception as e:
        # Roll back the failed user turn so the broken exchange is not
        # replayed on the next call, and log the cause instead of hiding it.
        if messages and messages[-1].get("role") == "user":
            messages.pop()
        print(f"错误详情：{e}")
        return "我累了一会儿再跟我继续聊吧"


from duckduckgo_search import DDGS

def get_web_search(query):
    """Search the web through DuckDuckGo and return a short text summary.

    :param query: the search query string
    :return: up to three "title: body" lines joined by newlines, a fallback
             message when nothing was found, or an error message on failure
    """
    try:
        with DDGS() as ddgs:
            # Fetch at most three hits and render each as "title: body".
            hits = ddgs.text(query, max_results=3)
            summary = "\n".join(f"{hit['title']}: {hit['body']}" for hit in hits)
            return summary.strip() if summary else "抱歉，没找到相关信息。"
    except Exception as e:
        return f"搜索出错：{str(e)}"



import time

# 监听消息的主函数
def listen_and_reply_forever(listen_list=None):
    """Continuously watch WeChat chats and auto-reply to messages starting with #ZM.

    :param listen_list: chat names to watch; defaults to a built-in list
    """
    targets = listen_list if listen_list is not None else ['A一']  # default watch list

    # Register every target with the WeChat listener.
    for name in targets:
        wx.AddListenChat(who=name)
        print(f"已添加监听: {name}")

    poll_interval = 1  # seconds between polls
    print("开始监听微信好友消息（仅限以 #ZM 开头）...")

    while True:
        try:
            incoming = wx.GetListenMessage()
            for chat in incoming:
                sender = chat.who
                for msg in incoming.get(chat):
                    # Only friend messages carrying the #ZM prefix are handled.
                    if msg.type != "friend" or not msg.content.startswith("#ZM"):
                        continue
                    text = msg.content
                    print(f'【{sender}】：{text}')
                    question = text[3:].strip()
                    if not question:
                        continue
                    answer = chating_with_ollama(question)
                    wx.SendMsg(answer, sender)
                    print(f"已回复：{answer}")
        except Exception as e:
            # Keep the loop alive on any per-poll failure.
            print(f"监听出错：{e}，继续运行...")
        time.sleep(poll_interval)



# 调用函数运行
# Entry point: start the listener on a customizable watch list.
if __name__ == "__main__":
    friends_to_watch = ['A一']  # replace with your own contact name(s)
    listen_and_reply_forever(friends_to_watch)