from langchain.embeddings.dashscope import DashScopeEmbeddings
from langchain_chroma import Chroma
from langchain.agents import AgentExecutor, create_tool_calling_agent, tool, \
  ZeroShotAgent
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain.tools.retriever import create_retriever_tool
from dotenv import find_dotenv, load_dotenv
import os

# Load variables from the nearest .env file so the DashScope credentials are
# available before any model client is constructed.
load_dotenv(find_dotenv())
# Fail fast (KeyError) at import time if the DashScope API key is missing.
DASHSCOPE_API_KEY = os.environ["DASHSCOPE_API_KEY"]

# Chat prompt for the tool-calling agent. The system message (Chinese)
# instructs the model to act as a customer-service assistant and to call the
# call_user_save tool ONLY when the user has supplied BOTH a phone number and
# an address; otherwise it must answer normally without calling the tool.
# The "placeholder" slots are filled by AgentExecutor at run time
# ({agent_scratchpad} carries the agent's intermediate tool-call steps).
prompt = ChatPromptTemplate.from_messages(
    [
      ("system",
       "你是一个智能客服，根据用户提出的问题进行回答，如果用户的回答涉及到手机号和地址"
       "你需要调用call_user_save函数来生成表单，需要向该函数提供用户的手机号，地址这两项参数，"
       "只有这两项参数都提供了才调用该方法，如果用户没有输入手机号和地址请不要调用该函数，如果用户的回答不涉及这两项参数，"
       "请不要调用该方法，请返回实际的回复"),
      ("placeholder", "{chat_history}"),
      ("human", "{input}"),
      ("placeholder", "{agent_scratchpad}"),
    ]
)


def send_message(message: str) -> dict:
  """Answer a customer-service query via a tool-calling agent.

  Builds a Tongyi (Qwen) chat model, a retriever over the persistent local
  Chroma store, and an agent that can either retrieve reference content or
  call ``call_user_save`` to record the user's phone number and address.

  Note: the model, embeddings, and vector store are reconstructed on every
  call; hoist them to module level if this becomes a hot path.

  Args:
      message: The user's input utterance.

  Returns:
      The ``AgentExecutor.invoke`` result dict (contains the ``input`` and
      the agent's final ``output``).
  """
  llm = ChatTongyi(
      model_name="qwen2-72b-instruct",
      streaming=True,
      # Reuse the key validated at import time rather than re-reading
      # os.environ — one source of truth for the credential.
      api_key=DASHSCOPE_API_KEY,
  )
  # DashScope embeddings read their API key from the environment.
  embeddings = DashScopeEmbeddings(
      model="text-embedding-v1",
  )

  # Persistent on-disk vector store holding the customer-support corpus.
  vector = Chroma(collection_name='customer', embedding_function=embeddings,
                  persist_directory='./chroma')
  # MMR search trades off relevance vs. diversity among the top-6 hits
  # (lambda_mult=0.25 leans toward diversity).
  retriever = vector.as_retriever(search_type="mmr",
                                  search_kwargs={'k': 6, 'lambda_mult': 0.25})
  vector_retriever_tool = create_retriever_tool(
      retriever,
      "vector_retriever_tool",
      "当用户提问时，优先借助该工具检索相关的内容"
  )

  tools = [call_user_save, vector_retriever_tool]

  agent = create_tool_calling_agent(llm, tools, prompt)

  agent_executor = AgentExecutor.from_agent_and_tools(agent=agent, tools=tools,
                                                      verbose=True)
  result = agent_executor.invoke({"input": message})
  print(f"result: {result}")  # debug trace of the full agent result
  return result


@tool
def call_user_save(phone: str, address: str) -> str:
  # NOTE: this docstring doubles as the tool description the LLM sees.
  """Collect the user's phone number and address for later follow-up contact."""
  # phone is a str, not an int: phone numbers are identifiers — an int schema
  # would drop leading zeros and reject "+86..."-style numbers.
  print(phone, address)
  return "保存成功"
