import os
import requests
from langchain_core.tools import Tool
from functools import partial
from langchain.retrievers import ContextualCompressionRetriever
from langchain.retrievers.document_compressors import LLMChainExtractor
from langchain.docstore.document import Document
from langchain_community.chat_models import ChatOpenAI
# API key for the Bocha Web Search service, read once from the environment at import time.
bocha_api_key = os.environ.get('BOCHA_KEY')


def _bocha_web_search_tool(query: str, count: int = 5, llm=None) -> str:
    """
    Search the web via the Bocha Web Search API and return an LLM-compressed
    summary of the results.

    Parameters:
    - query: search keywords.
    - count: number of search results to request from the API.
    - llm: optional chat model used to extract query-relevant passages;
      defaults to ChatOpenAI(temperature=0).

    Returns:
    - Joined compressed summaries of the result pages, or the fallback
      string "未找到相关摘要" when no relevant content survives compression.

    Raises:
    - Exception: when the API responds with a non-200 status code.
    """
    url = 'https://api.bochaai.com/v1/web-search'
    headers = {
        'Authorization': f'Bearer {bocha_api_key}',
        'Content-Type': 'application/json'
    }
    payload = {
        "query": query,
        "freshness": "nolimit",  # time window: "oneday", "oneweek", "onemonth", "oneyear", "nolimit"
        "summary": True,  # ask the API for long-form text summaries
        "count": count
    }

    # Timeout prevents a hung connection from blocking the agent forever.
    response = requests.post(url, headers=headers, json=payload, timeout=30)
    if response.status_code != 200:
        raise Exception(f"API 请求失败，状态码: {response.status_code}, 错误信息: {response.text}")

    body = response.json()
    # Guard against an empty or malformed payload instead of KeyError-ing.
    web_pages = body.get('data', {}).get('webPages', {}).get('value', [])
    if not web_pages:
        return "未找到相关摘要"

    # Wrap each result as a Document so the compressor can process it.
    docs = [Document(
        page_content=page['summary'],
        metadata={"title": page['name'], "url": page['url']}
    ) for page in web_pages]

    # BUG FIX: the original passed the raw list of Documents as
    # `base_retriever` to ContextualCompressionRetriever, which requires a
    # BaseRetriever instance and fails pydantic validation at runtime.
    # Since we already hold the documents, run the compressor on them directly.
    compressor = LLMChainExtractor.from_llm(llm or ChatOpenAI(temperature=0))
    compressed_docs = compressor.compress_documents(docs, query)

    # Keep only non-empty extracts and format one entry per source page.
    return "\n\n▲ 压缩后摘要 ▲\n".join(
        [f"来源：{doc.metadata['title']}\n{doc.page_content}"
         for doc in compressed_docs if doc.page_content.strip()]
    ) or "未找到相关摘要"


# 创建 langchain 工具
def get_bocha_tool(llm=None):
    """Build the Bocha web-search LangChain Tool with the LLM pre-bound.

    Parameters:
    - llm: optional chat model forwarded to the search function; falls back
      to ChatOpenAI(temperature=0) when omitted.

    Returns:
    - A langchain_core Tool wrapping _bocha_web_search_tool.
    """
    # Bind the (possibly defaulted) model now, so callers invoke the tool
    # with nothing but a query string.
    chosen_llm = llm or ChatOpenAI(temperature=0)
    search_fn = partial(_bocha_web_search_tool, llm=chosen_llm)

    return Tool(
        name="BochaWebSearchTool",
        func=search_fn,
        description="""
        此工具通过博查Web Search API执行网络搜索，并运用智能摘要技术提取核心信息。
        支持预配置语言模型，能够自动过滤无关内容，保留与查询直接相关的关键要点。
        """
    )

if __name__ == "__main__":
    import sys
    import os
    user_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'customize')
    # 将 user 目录添加到 sys.path 中
    sys.path.append(user_dir)
    from customize.get_ollama import GetOllama

    r1 = GetOllama(model_type=0, model_name="qwen2.5:3b", temperature=0)()
    # 提前绑定LLM参数
    search_tool = get_bocha_tool(llm=r1)
    print(search_tool.args)
    # # 调用时无需传llm参数
    result = search_tool.run({"tool_input":"气候变化最新研究"})