#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2025/9/24 22:34
# @Author  : Dell
# @File    : logic_route.py
# @Software: PyCharm
# @Desc    :逻辑路由:准备多个维度的数据源》llm先判断用户所需数据源》找到对应数据源
import os
from typing import Literal

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_core.runnables import RunnableLambda
from langchain_openai import ChatOpenAI
# SECURITY: never hard-code an API key in source — it leaks via version control.
# Read it from the environment instead (rotate the original key if it was live).
llm = ChatOpenAI(
    model_name='qwen-plus',
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    temperature=0.5,
    api_key=os.environ.get("DASHSCOPE_API_KEY", ""),
)
# Set up several "departments" and define their responsibilities.
# Data-model department: the routing schema the LLM must fill in.
# NOTE: the class docstring and the Field description below are sent to the
# model as part of the structured-output (tool) schema, so they are runtime
# behavior and are kept verbatim in Chinese.
class RouteQuery(BaseModel):
    """将用户查询路由到最相关的数据源。"""

    # `Literal` constrains the model's structured output to exactly these
    # three documentation corpora.
    datasource: Literal["python_docs", "js_docs", "golang_docs"] = Field(
        ...,
        description="给出一个用户问题，选择哪个数据源与回答他们的问题最相关",
    )

# LLM with function calling: responses are parsed into RouteQuery instances.
structured_llm = llm.with_structured_output(RouteQuery)

# Prompt — the system text is sent to the model verbatim, hence left in Chinese.
system = """您是将用户问题路由到适当数据源的专家。

根据问题所指的编程语言，将其路由到相关数据源。"""

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", system),
        ("human", "{question}"),
    ]
)

# Define the router: prompt -> structured LLM, producing a RouteQuery.
router = prompt | structured_llm
# A customer arrives with a question; some department must take ownership.
# The question text (including the embedded code sample) is runtime input
# and is kept byte-for-byte.
question = """为什么下面的代码不起作用:

import random
import string

def generate_password(length):
    characters = string.ascii_letters + string.digits + string.punctuation
    password = ''.join(random.choice(characters) for i in range(length))
    return password

password = generate_password(12)
print("Generated password:", password)
"""

# Invoke the router; `result` is a RouteQuery with the chosen datasource.
result = router.invoke({"question": question})
print(f"打印用户所归属哪条线：{result.datasource}")
# Once we have the routing decision, hand off to the matching business line.
def choose_route(result):
    """Dispatch a routing decision to the chain that owns its datasource.

    Args:
        result: RouteQuery-like object whose ``datasource`` attribute names
            the chosen corpus ("python_docs", "js_docs" or "golang_docs").

    Returns:
        str: placeholder identifier of the downstream chain.
    """
    # Hoist the repeated .lower() call out of the branch conditions.
    source = result.datasource.lower()
    if "python_docs" in source:
        ### real Python-docs retrieval chain would be built here
        return "chain for python_docs"
    elif "js_docs" in source:
        ### real JS-docs retrieval chain would be built here
        return "chain for js_docs"
    else:
        # Fix: the original returned bare "golang_docs", inconsistent with
        # the "chain for ..." convention of the sibling branches.
        return "chain for golang_docs"



full_chain = router | RunnableLambda(choose_route)
f_re=full_chain.invoke({"question": question})
print(f"转接到对应业务线去解决：{f_re}")