import lazyllm
from lazyllm import (
    fc_register, Document, Retriever, 
    OnlineEmbeddingModule, OnlineChatModule, WebModule,
    ReactAgent,Reranker,SentenceSplitter,pipeline
)
# DashScope OpenAI-compatible endpoint used by the online chat module below.
base_url_http = "https://dashscope.aliyuncs.com/compatible-mode"
# Local directory holding the knowledge-base documents to index.
doc_path="F:\\file\\my\\workplace\\agentLazyLLM\\lazyllm-venv\\docs"


# Online embedding model (Qwen text-embedding-v4) used to embed/index the documents.
embed_model = OnlineEmbeddingModule(source='qwen', embed_model_name='text-embedding-v4',type="embed")

# Document store over doc_path; the Retriever recalls the top-3 nodes from the
# default 'CoarseChunk' node group using Chinese BM25 similarity.
doc = Document(dataset_path=doc_path, embed=embed_model)
retriever = Retriever(doc, group_name='CoarseChunk', similarity="bm25_chinese", topk=3)
# reranker = Reranker('ModuleReranker', model=online_rerank, topk=3)
# print(retriever)

# query = "介绍一下PHP内核"
# Collect every node recalled by the Retriever into the list doc_node_list:
# doc_node_list = retriever(query=query)
# Combine the query and the recalled nodes' contents into a dict as LLM input:
# res = llm({"query": query, "context_str": "  ".join([node.get_content() for node in doc_node_list])})

# print(f"answer: {res}")

@fc_register("tool")
def search_knowledge_base(query: str) -> str:
    """
    Search the knowledge base and return relevant document content.

    Args:
        query (str): The search query string.

    Returns:
        str: The contents of the recalled document nodes, separated by
            blank lines; empty string if nothing was recalled.
    """
    doc_node_list = retriever(query=query)
    # Join with a blank line so distinct documents do not run together —
    # a ""-join concatenates contents with no delimiter, which makes the
    # context hard for the LLM to segment.
    return "\n\n".join(node.get_content() for node in doc_node_list)

# agent = lazyllm.OnlineChatModule(
#     source="qwen",
#     model="qwen-plus",
#     stream=False,
#     base_url=base_url_http,
#     tools=['search_knowledge_base']
#     )
# Chat LLM (Qwen qwen-plus) reached through the DashScope-compatible endpoint.
llm=OnlineChatModule(source='qwen', model="qwen-plus", stream=False,base_url=base_url_http)
# Persona instruction for the system prompt; 'context_str' is declared as an
# extra key so retrieved context can be injected when the input is a dict.
# NOTE(review): the ReactAgent below appears to pass plain string queries, so
# the 'context_str' extra key may never be filled — confirm against usage.
prompt = '你是一个AI,说话风趣幽默，有耐心'
llm.prompt(lazyllm.ChatPrompter(instruction=prompt, extra_keys=['context_str']))

# doc.create_node_group(name="block", transform=SentenceSplitter, chunk_size=1024, chunk_overlap=100)
# doc.create_node_group(name="line", transform=SentenceSplitter, chunk_size=128, chunk_overlap=20, parent="block")
# rerank_model = OnlineEmbeddingModule(source='qwen', embed_model_name='text-embedding-v4',type="rerank")

# with pipeline() as ppl:
#     with lazyllm.parallel().sum as ppl.prl:
#         prl.r1 = Retriever(doc, group_name='line', similarity="cosine", topk=6, target='block')
#         prl.r2 = Retriever(doc, group_name='block', similarity="cosine", topk=6)
#     ppl.reranker = Reranker('ModuleReranker', model=rerank_model, output_format='content',join=True) | bind(query=ppl.input)
#     ppl.formatter = (lambda context, query: dict(context_str=str(context), query=query)) | bind(query=ppl.input)
#     ppl.llm = OnlineChatModule(source='qwen', model="qwen-plus", stream=False,base_url=base_url_http).prompt(lazyllm.ChatPrompter(prompt, extra_keys=["context_str"]))

# agent = ReactAgent(
#     ppl.llm,
#     tools=['search_knowledge_base'],
#     prompt=prompt,
#     stream=False
# )


# Wrap the LLM in a ReAct-style agent that can invoke the registered
# knowledge-base search tool while answering.
agent = ReactAgent(
    llm,
    tools=['search_knowledge_base'],
    prompt=prompt,
    stream=False,
)

# Serve the agent through LazyLLM's built-in web UI and block until shutdown.
web_app = WebModule(agent, stream=False)
web_app.start().wait()


# while True:
#     query = input("query(enter 'quit' to exit): ")
#     if query == "quit":
#         break
#     res = ppl(query)
#     print(f"answer: {res}")   