# FastAPI integration with Coze-managed prompts
import requests
import os
from typing import Dict, Optional


class CozePromptManager:
    """Fetches prompt templates from the Coze API and caches them in memory."""

    def __init__(self):
        # API key is read from the environment; may be None if unset.
        self.coze_api_key = os.getenv("COZE_API_KEY")
        self.coze_base_url = "https://api.coze.com/v1"  # Coze API endpoint
        self.prompt_cache: Dict[str, str] = {}

    def get_prompt_from_coze(self, prompt_id: str) -> Optional[str]:
        """Return the prompt template for *prompt_id*, or None on any failure.

        Results are cached per prompt_id; subsequent calls skip the network.
        """
        try:
            # Serve from cache when we already fetched this prompt.
            cached = self.prompt_cache.get(prompt_id)
            if cached is not None:
                return cached

            # Fetch from the Coze API.
            # NOTE: adjust the path/fields to the actual Coze API docs.
            resp = requests.get(
                f"{self.coze_base_url}/prompts/{prompt_id}",
                headers={
                    "Authorization": f"Bearer {self.coze_api_key}",
                    "Content-Type": "application/json"
                },
                timeout=10
            )

            if resp.status_code != 200:
                print(f"从 Coze 获取提示词失败: {resp.status_code}")
                return None

            content = resp.json().get("content", "")

            # Cache for subsequent requests.
            self.prompt_cache[prompt_id] = content
            return content

        except Exception as e:
            # Best-effort: log and degrade to None rather than propagate.
            print(f"Coze 提示词获取异常: {e}")
            return None

    def refresh_prompt(self, prompt_id: str):
        """Drop any cached copy of *prompt_id* and fetch it fresh from Coze."""
        self.prompt_cache.pop(prompt_id, None)
        return self.get_prompt_from_coze(prompt_id)


# Module-level singleton: its in-memory prompt cache is shared by all requests.
coze_manager = CozePromptManager()


# Option 1: wire Coze-managed prompts into the chat endpoint
class CozeChatRequest(BaseModel):
    """Request body for the /chat/with-coze-prompt endpoint."""

    # The user's current message.
    message: str
    coze_prompt_id: str  # ID of the prompt template stored in Coze
    session_id: Optional[str] = None
    # Prior turns, each a {"role": ..., "content": ...} dict.
    conversation_history: List[dict] = Field(default_factory=list)
    # NOTE(review): this default is evaluated once at import time, not per
    # request — confirm that runtime changes to DEFAULT_MODEL are not expected.
    model: str = os.getenv("DEFAULT_MODEL", "deepseek-ai/DeepSeek-V3")
    max_tokens: int = 2048
    temperature: float = 0.7


@app.post("/chat/with-coze-prompt")
async def chat_with_coze_prompt(request: CozeChatRequest):
    """Chat using a system prompt managed in Coze.

    Fetches the prompt identified by ``request.coze_prompt_id`` from Coze,
    assembles the message list (system prompt, prior turns, current user
    message) and forwards it to the chat-completions client.

    Raises:
        HTTPException: 500 when the prompt cannot be fetched or the
            downstream completion call fails.
    """
    try:
        # 1. Fetch the system prompt from Coze (cached by the manager).
        system_prompt = coze_manager.get_prompt_from_coze(request.coze_prompt_id)

        if not system_prompt:
            raise HTTPException(
                status_code=500,
                detail=f"无法从 Coze 获取提示词: {request.coze_prompt_id}"
            )

        # 2. Build the message list: system prompt first, then history,
        #    then the current user message.
        messages = [{"role": "system", "content": system_prompt}]

        if request.conversation_history:
            messages.extend(request.conversation_history)

        messages.append({"role": "user", "content": request.message})

        # 3. Call the chat-completions API (SiliconFlow client).
        response = client.chat.completions.create(
            model=request.model,
            messages=messages,
            max_tokens=request.max_tokens,
            temperature=request.temperature,
            stream=False
        )

        ai_response = response.choices[0].message.content

        return {
            "response": ai_response,
            "coze_prompt_id": request.coze_prompt_id,
            "model_used": request.model,
            "tokens_used": response.usage.total_tokens if response.usage else None
        }

    except HTTPException:
        # Bug fix: re-raise the HTTPException from the prompt-fetch step so
        # its specific detail reaches the client instead of being swallowed
        # and re-wrapped as a generic failure below.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"对话失败: {str(e)}")

# Option 2: Coze as a prompt-workflow engine
class CozeWorkflowManager:
    """Executes prompt workflows hosted on Coze."""

    def __init__(self):
        # API key is read from the environment; may be None if unset.
        self.coze_api_key = os.getenv("COZE_API_KEY")
        self.coze_base_url = "https://api.coze.com/v1"

    # Bug fix: annotated -> dict but every failure path returns None;
    # Optional[dict] matches the actual contract (and CozePromptManager's).
    def execute_workflow(self, workflow_id: str, user_input: str, context: dict) -> Optional[dict]:
        """Execute the Coze workflow *workflow_id* with the user input.

        Args:
            workflow_id: ID of the workflow defined in Coze.
            user_input: The raw user message passed to the workflow.
            context: Extra request context forwarded under ``input.context``.

        Returns:
            The parsed JSON response on success, or None on any failure.
        """
        try:
            headers = {
                "Authorization": f"Bearer {self.coze_api_key}",
                "Content-Type": "application/json"
            }

            payload = {
                "workflow_id": workflow_id,
                "input": {
                    "user_message": user_input,
                    "context": context
                }
            }

            response = requests.post(
                f"{self.coze_base_url}/workflows/execute",
                headers=headers,
                json=payload,
                timeout=30
            )

            if response.status_code == 200:
                return response.json()
            else:
                print(f"Coze 工作流执行失败: {response.status_code}")
                return None

        except Exception as e:
            # Best-effort: log and degrade to None rather than propagate.
            print(f"Coze 工作流执行异常: {e}")
            return None


# Module-level singleton workflow manager shared by the endpoints below.
coze_workflow_manager = CozeWorkflowManager()


@app.post("/chat/with-coze-workflow")
async def chat_with_coze_workflow(
        message: str,
        workflow_id: str,
        session_id: Optional[str] = None
):
    """Handle one chat turn through a Coze workflow.

    The workflow may return an enhanced system prompt and model settings,
    which are then used for the chat-completions call.
    """

    # Run the Coze workflow with the current session context.
    result = coze_workflow_manager.execute_workflow(
        workflow_id,
        message,
        {
            "session_id": session_id,
            "timestamp": datetime.now().isoformat(),
            "user_profile": {}  # placeholder for user-profile data
        },
    )

    if not result:
        raise HTTPException(status_code=500, detail="Coze 工作流执行失败")

    # Pull the enhanced prompt and model settings out of the workflow output.
    enhanced_prompt = result.get("enhanced_prompt")
    cfg = result.get("model_config", {})

    # System prompt (when provided) followed by the user message.
    prompt_messages = (
        [{"role": "system", "content": enhanced_prompt}] if enhanced_prompt else []
    )
    prompt_messages.append({"role": "user", "content": message})

    completion = client.chat.completions.create(
        model=cfg.get("model", "deepseek-ai/DeepSeek-V3"),
        messages=prompt_messages,
        max_tokens=cfg.get("max_tokens", 2048),
        temperature=cfg.get("temperature", 0.7),
        stream=False
    )

    return {
        "response": completion.choices[0].message.content,
        "workflow_id": workflow_id,
        "enhanced_prompt_used": enhanced_prompt is not None,
        "tokens_used": completion.usage.total_tokens if completion.usage else None
    }

# Option 3: dynamic prompt selection
class DynamicPromptSelector:
    """Chooses a Coze prompt based on what the conversation is about."""

    # Keyword triggers, checked in order — the first category whose keywords
    # appear in the (lowercased) message wins.
    _KEYWORD_RULES = (
        ("technical", ('代码', '编程', '技术', 'bug', 'error')),
        ("customer_service", ('客服', '帮助', '支持', '问题', '怎么')),
        ("creative", ('写作', '创作', '故事', '诗歌')),
        ("analytical", ('分析', '数据', '统计', '报告')),
    )

    def __init__(self):
        self.coze_manager = CozePromptManager()
        self.prompt_mappings = {
            "technical": "coze_tech_prompt_id",
            "customer_service": "coze_customer_service_prompt_id",
            "creative": "coze_creative_prompt_id",
            "analytical": "coze_analytical_prompt_id"
        }

    def select_prompt_based_on_content(self, message: str, history: List[dict]) -> str:
        """Return the Coze prompt ID best matching *message*.

        Simple keyword matching (could be upgraded to an ML classifier);
        *history* is accepted for future use but not consulted yet.
        """
        lowered = message.lower()
        for category, keywords in self._KEYWORD_RULES:
            if any(kw in lowered for kw in keywords):
                return self.prompt_mappings[category]
        # Nothing matched — fall back to the customer-service prompt.
        return self.prompt_mappings["customer_service"]


# Module-level selector instance used by the auto-prompt endpoint.
prompt_selector = DynamicPromptSelector()


@app.post("/chat/auto-prompt")
async def chat_with_auto_prompt(
        message: str,
        session_id: Optional[str] = None,
        # Bug fix: was annotated List[dict] with a None default — the type
        # must be Optional to match the actual default value.
        conversation_history: Optional[List[dict]] = None
):
    """Automatically pick the best-fitting Coze prompt and chat with it.

    Selects a prompt ID from the message content, then delegates to the
    /chat/with-coze-prompt handler with that prompt.
    """

    # Choose a prompt ID based on the message content (history is passed
    # through but not yet consulted by the selector).
    selected_prompt_id = prompt_selector.select_prompt_based_on_content(
        message, conversation_history or []
    )

    # Delegate to the Coze-prompt chat endpoint with the selected prompt.
    request = CozeChatRequest(
        message=message,
        coze_prompt_id=selected_prompt_id,
        session_id=session_id,
        conversation_history=conversation_history or []
    )

    return await chat_with_coze_prompt(request)