from typing import Dict, List, Optional, Any, Union, AsyncGenerator

from httpx import Response
from app.services.dify_client import DifyClient
from app.core.logger import logger
from app.models.chat import FileObject, ResponseMode, StopTaskRequest, MessageFeedback, SuggestedQuestionsResponse, MessageHistoryResponse

class ChatService:
    """
    Chat service.

    Wraps the Dify chat API: sending messages, streaming responses,
    stopping generation, message feedback, suggested questions and
    conversation message history.
    """

    def __init__(self, api_key: Optional[str] = None):
        """
        Initialize the chat service.

        Args:
            api_key: Optional API key; when omitted, the configuration
                from environment variables is used.
        """
        self.client = DifyClient(api_key=api_key)

    @staticmethod
    def _response_mode_value(response_mode: Union[ResponseMode, str]) -> str:
        """
        Normalize a response mode to its plain string value.

        Checking for ResponseMode explicitly (rather than `isinstance(x, str)`)
        stays correct even if ResponseMode is a str-based enum, in which case
        a string check would match the enum member itself and the enum object,
        not its value, would end up in the request payload.
        """
        return response_mode.value if isinstance(response_mode, ResponseMode) else response_mode

    async def send_message(
        self,
        query: str,
        user: str,
        conversation_id: Optional[str] = None,
        inputs: Optional[Dict[str, Any]] = None,
        files: Optional[List[FileObject]] = None,
        response_mode: Union[ResponseMode, str] = ResponseMode.STREAMING,
        auto_generate_name: bool = True
    ) -> Union[Dict[str, Any], Response]:
        """
        Send a chat message.

        Args:
            query: The user's question or instruction.
            user: Unique identifier of the user.
            conversation_id: Optional conversation ID used to continue an
                existing conversation.
            inputs: Optional variable inputs filling app-defined variables.
            files: Optional list of attached files.
            response_mode: Response mode, either "blocking" or "streaming".
            auto_generate_name: Whether to auto-generate a title
                (defaults to True).

        Returns:
            The full response payload in blocking mode, or the raw
            response object in streaming mode.
        """
        # Normalize once so the payload and the stream flag always agree.
        mode = self._response_mode_value(response_mode)
        data: Dict[str, Any] = {
            "query": query,
            "user": user,
            "response_mode": mode,
            "inputs": inputs or {},
            "auto_generate_name": auto_generate_name
        }

        if conversation_id:
            data["conversation_id"] = conversation_id

        if files:
            # exclude_none keeps unset optional file fields out of the JSON payload
            data["files"] = [f.dict(exclude_none=True) for f in files]

        logger.debug(f"发送聊天消息: user={user}, query={query}, response_mode={response_mode}")

        return await self.client.request(
            method="POST",
            endpoint="/chat-messages",
            data=data,
            stream=(mode == ResponseMode.STREAMING.value)
        )

    async def stream_chat_response(self, response: Response) -> AsyncGenerator[Dict[str, Any], None]:
        """
        Iterate over a streaming chat response.

        Args:
            response: Streaming response object obtained from send_message.

        Yields:
            Parsed message event payloads.
        """
        async for event_data in self.client.stream_response(response):
            yield event_data

    async def stop_generation(self, task_id: str, user: str) -> Dict[str, str]:
        """
        Stop an in-progress response generation.

        Args:
            task_id: ID of the task to stop.
            user: User ID.

        Returns:
            Dict[str, str]: Contains a "result" field, always "success".
        """
        data = StopTaskRequest(user=user).dict()
        logger.debug(f"停止响应生成: task_id={task_id}, user={user}")
        return await self.client.request(
            method="POST",
            endpoint=f"/chat-messages/{task_id}/stop",
            data=data
        )

    async def create_message_feedback(
        self,
        message_id: str,
        rating: str,
        user: str,
        content: Optional[str] = None
    ) -> Dict[str, str]:
        """
        Create feedback on a message.

        Args:
            message_id: Message ID.
            rating: Rating ("like", "dislike" or null).
            user: User ID.
            content: Optional feedback text.

        Returns:
            Dict[str, str]: Contains a "result" field, always "success".
        """
        feedback = MessageFeedback(
            rating=rating,
            user=user,
            content=content
        )

        logger.debug(f"创建消息反馈: message_id={message_id}, rating={rating}, user={user}")
        return await self.client.request(
            method="POST",
            endpoint=f"/messages/{message_id}/feedbacks",
            data=feedback.dict(exclude_none=True)
        )

    async def get_suggested_questions(
        self,
        message_id: str,
        user: str
    ) -> List[str]:
        """
        Fetch suggested follow-up questions for a message.

        Args:
            message_id: Message ID.
            user: User ID.

        Returns:
            List[str]: Suggested questions.
        """
        logger.debug(f"获取建议问题: message_id={message_id}, user={user}")
        response = await self.client.request(
            method="GET",
            endpoint=f"/messages/{message_id}/suggested",
            params={"user": user}
        )

        # Validate the raw API payload through the response model.
        return SuggestedQuestionsResponse(**response).data

    async def get_messages(
        self,
        conversation_id: str,
        user: str,
        first_id: Optional[str] = None,
        limit: int = 20
    ) -> MessageHistoryResponse:
        """
        Fetch the message history of a conversation.

        Args:
            conversation_id: Conversation ID.
            user: User ID.
            first_id: ID of the first chat record on the current page
                (pagination cursor).
            limit: Number of chat records to return per request.

        Returns:
            MessageHistoryResponse: Messages plus pagination info.
        """
        params: Dict[str, Any] = {
            "conversation_id": conversation_id,
            "user": user,
            "limit": limit
        }

        if first_id:
            params["first_id"] = first_id

        logger.debug(f"获取消息列表: {params}")
        response = await self.client.request(
            method="GET",
            endpoint="/messages",
            params=params
        )

        # Validate the raw API payload through the response model.
        return MessageHistoryResponse(**response)
