"""
LangGraph集成服务层
将LangGraph工作流集成到现有的LLM服务中
"""
from datetime import datetime
from typing import Dict, Any, List, Optional, Union, AsyncGenerator

from sqlalchemy.orm import Session

from app.core.logging import get_logger
from app.graphs.core import StateManager, GraphState
from app.graphs.workflows import workflow_manager
from app.models.chat_conversation import ChatConversation
from app.models.chat_message import ChatMessage
from app.models.llm_model import LLMModel
from app.models.llm_model import LLMModel

logger = get_logger(__name__)


class LangGraphService:
    """LangGraph integration service.

    Bridges the persisted chat entities (ChatConversation / ChatMessage /
    LLMModel) with the in-memory LangGraph workflow runtime: creates
    conversations bound to a workflow, executes workflows (normal or
    streaming) over the stored message history, persists the resulting
    assistant messages, and exposes status / cancel / config helpers.
    """

    def __init__(self):
        # Tracks per-workflow runtime state, keyed by workflow_id.
        self.state_manager = StateManager()

    @staticmethod
    def _resolve_temperature(model) -> float:
        """Return the sampling temperature for an LLMModel row.

        ``model.temperature`` may be stored either as a bare number or as a
        dict such as ``{"default": 0.7}``.  Falls back to 0.7 when the model
        row is missing or the dict carries no ``'default'`` key.
        (Fix: the previous inline expression dereferenced ``model.temperature``
        even when ``model`` was ``None``.)
        """
        if model is None:
            return 0.7
        temperature = model.temperature
        if isinstance(temperature, dict):
            return temperature.get('default', 0.7)
        return temperature

    @staticmethod
    def _save_message(db: Session, conversation_id: int, role: str, content: str) -> None:
        """Persist a single chat message and commit immediately."""
        db.add(ChatMessage(
            conversation_id=conversation_id,
            role=role,
            content=content
        ))
        db.commit()

    @staticmethod
    def _touch_conversation(db: Session, conversation_id: int) -> None:
        """Bump the conversation's ``updated_at`` timestamp, if it still exists."""
        conversation = db.query(ChatConversation).filter(
            ChatConversation.id == conversation_id
        ).first()
        if conversation:
            # Fix: datetime was previously referenced without being imported.
            conversation.updated_at = datetime.now()
            db.commit()

    async def create_conversation_with_workflow(
        self,
        db: Session,
        user_id: int,
        title: str,
        model_id: int,
        workflow_type: str = "simple_chat"
    ) -> Dict[str, Any]:
        """Create a conversation row plus its initial workflow state.

        Args:
            db: Active SQLAlchemy session.
            user_id: Owner of the new conversation.
            title: Conversation title (also placed in the workflow context).
            model_id: ``LLMModel`` primary key; defaults are substituted for
                every model-derived field if the row cannot be found.
            workflow_type: Registered workflow name to associate.

        Returns:
            Dict with ``conversation_id``, ``workflow_id``, ``workflow_type``
            and the workflow's current/next step.
        """
        # Create the conversation record.
        conversation = ChatConversation(
            user_id=user_id,
            title=title,
            model_id=model_id
        )
        db.add(conversation)
        db.commit()
        db.refresh(conversation)

        # Look up the model; every use below must tolerate a missing row.
        model = db.query(LLMModel).filter(LLMModel.id == model_id).first()

        # Build the initial workflow state.
        initial_state = {
            "conversation_id": conversation.id,
            "user_id": user_id,
            "workflow_type": workflow_type,
            "provider": model.provider if model else "openai",
            "model_name": model.model_name if model else "gpt-3.5-turbo",
            "temperature": self._resolve_temperature(model),
            "max_tokens": model.max_tokens if model else 4000,
            "messages": [],
            "context_data": {"conversation_title": title}
        }

        # Register the state with the workflow runtime.
        state = self.state_manager.create_state(initial_state)

        return {
            "conversation_id": conversation.id,
            "workflow_id": state["workflow_id"],
            "workflow_type": workflow_type,
            "state": {
                "current_step": state["current_step"],
                "next_step": state["next_step"]
            }
        }

    async def process_message_with_workflow(
        self,
        db: Session,
        conversation_id: int,
        user_message: str,
        workflow_type: str = "simple_chat",
        stream: bool = False
    ) -> Union[Dict[str, Any], AsyncGenerator[str, None]]:
        """Run a workflow over the conversation history plus one new message.

        Args:
            db: Active SQLAlchemy session.
            conversation_id: Existing conversation to process.
            user_message: New user utterance (persisted by the sub-handlers).
            workflow_type: Workflow name used for non-streaming execution.
            stream: When True, an async generator of text chunks is returned;
                otherwise the full workflow result dict.

        Raises:
            ValueError: If the conversation or its model does not exist.
        """
        # Load the conversation and its associated model.
        conversation = db.query(ChatConversation).filter(
            ChatConversation.id == conversation_id
        ).first()
        if not conversation:
            raise ValueError("对话不存在")

        model = db.query(LLMModel).filter(
            LLMModel.id == conversation.model_id
        ).first()
        if not model:
            raise ValueError("模型不存在")

        # Replay the non-deleted history in chronological order.
        messages = db.query(ChatMessage).filter(
            ChatMessage.conversation_id == conversation_id,
            ChatMessage.is_deleted == False
        ).order_by(ChatMessage.created_at.asc()).all()

        message_history = [
            {'role': msg.role, 'content': msg.content}
            for msg in messages
        ]

        # Append the new user message.
        message_history.append({'role': 'user', 'content': user_message})

        # Build the initial workflow state.
        initial_state = {
            "conversation_id": conversation_id,
            "user_id": conversation.user_id,
            "workflow_type": workflow_type,
            "provider": model.provider,
            "model_name": model.model_name,
            "temperature": self._resolve_temperature(model),
            "max_tokens": model.max_tokens,
            "messages": message_history,
            "is_streaming": stream,
            "context_data": {
                "conversation_title": conversation.title,
                "user_id": conversation.user_id
            }
        }

        if stream:
            # Fix: _process_streaming_workflow is an async generator function,
            # so calling it yields an async generator that must be returned as
            # is — awaiting it would raise TypeError.
            return self._process_streaming_workflow(
                db, conversation_id, user_message, initial_state
            )
        return await self._process_normal_workflow(
            db, conversation_id, user_message, initial_state
        )

    async def _process_normal_workflow(
        self,
        db: Session,
        conversation_id: int,
        user_message: str,
        initial_state: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Execute a non-streaming workflow and persist its messages.

        Saves the user message up-front, runs the workflow named by
        ``initial_state['workflow_type']``, then stores the final assistant
        turn (if any) and touches the conversation timestamp.
        """
        self._save_message(db, conversation_id, 'user', user_message)

        result = await workflow_manager.execute_workflow(
            initial_state["workflow_type"],
            initial_state
        )

        # Persist only the last assistant turn produced by the workflow.
        if result["success"] and result["messages"]:
            assistant_messages = [
                msg for msg in result["messages"]
                if msg["role"] == "assistant"
            ]
            if assistant_messages:
                self._save_message(
                    db, conversation_id, 'assistant',
                    assistant_messages[-1]["content"]
                )

        self._touch_conversation(db, conversation_id)
        return result

    async def _process_streaming_workflow(
        self,
        db: Session,
        conversation_id: int,
        user_message: str,
        initial_state: Dict[str, Any]
    ) -> AsyncGenerator[str, None]:
        """Execute the streaming workflow and yield the response in chunks.

        Saves the user message up-front, then the full assistant response
        once it has been streamed out. Always routed to the dedicated
        "streaming" workflow, regardless of the workflow_type carried in
        ``initial_state``.
        """
        self._save_message(db, conversation_id, 'user', user_message)

        result = await workflow_manager.execute_workflow(
            "streaming",  # use the streaming workflow
            initial_state
        )

        if result["success"] and result["messages"]:
            assistant_messages = [
                msg for msg in result["messages"]
                if msg["role"] == "assistant"
            ]
            if assistant_messages:
                response_content = assistant_messages[-1]["content"]

                # Simulated streaming: the workflow returns the full text, so
                # emit it in fixed 10-character slices. Real token streaming
                # should replace this in production.
                for i in range(0, len(response_content), 10):
                    yield response_content[i:i + 10]

                # Save the complete response once streaming finished.
                self._save_message(
                    db, conversation_id, 'assistant', response_content
                )

        self._touch_conversation(db, conversation_id)

    async def get_workflow_status(
        self,
        workflow_id: str
    ) -> Optional[Dict[str, Any]]:
        """Return a status snapshot for a workflow, or None if unknown."""
        state = self.state_manager.get_state(workflow_id)
        if not state:
            return None

        return {
            "workflow_id": workflow_id,
            "current_step": state["current_step"],
            "next_step": state["next_step"],
            "error_occurred": state["error_occurred"],
            "error_message": state["error_message"],
            "retry_count": state["retry_count"],
            "total_tokens": state["total_tokens"],
            "estimated_cost": state["estimated_cost"],
            "tools_used": state["tools_used"],
            "function_calls": state["function_calls"],
            "start_time": state["start_time"].isoformat(),
            "end_time": state["end_time"].isoformat() if state["end_time"] else None
        }

    async def cancel_workflow(
        self,
        workflow_id: str
    ) -> bool:
        """Cancel a workflow by cleaning up its tracked state."""
        return self.state_manager.cleanup_state(workflow_id)

    async def list_available_workflows(self) -> List[str]:
        """Return the workflow names registered with the workflow manager."""
        return workflow_manager.list_available_workflows()

    async def get_workflow_config(
        self,
        workflow_type: str
    ) -> Dict[str, Any]:
        """Return node/edge structure for a workflow, or {} if unregistered."""
        workflow = workflow_manager.get_workflow(workflow_type)
        if not workflow:
            return {}

        return {
            "workflow_type": workflow_type,
            "nodes": list(workflow.nodes),
            "edges": list(workflow.edges)
        }


# Module-level singleton service instance shared by importers of this module.
langgraph_service = LangGraphService()