import asyncio
import json
import os
from typing import Any, AsyncIterator, Dict, Iterator, List, Mapping, Optional

import aiohttp
from dotenv import load_dotenv
from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage
from langchain_core.outputs import ChatGeneration, ChatResult

load_dotenv()

class ChatDeepseek(BaseChatModel):
    """LangChain chat model backed by the DeepSeek chat-completions API.

    Talks to the OpenAI-compatible ``/chat/completions`` endpoint using a
    lazily created ``aiohttp`` session shared across requests. Call
    :meth:`aclose` when finished to release the underlying HTTP session.
    """

    # Read at class-definition time; None when the env var is unset.
    api_key: Optional[str] = os.getenv("DEEPSEEK_API_KEY")
    # Base URL of the DeepSeek OpenAI-compatible endpoint.
    api_base: str = "https://api.deepseek.com/v1"
    model_name: str = "deepseek-chat"
    temperature: float = 0.7
    max_tokens: int = 2000
    # Lazily created in _create_session and reused across requests.
    session: Optional[aiohttp.ClientSession] = None

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "deepseek-chat"

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Generate a chat response synchronously.

        Bug fix: ``_call`` is a coroutine, so it must be run to completion —
        previously the un-awaited coroutine object itself was used as the
        message content. ``asyncio.run`` drives it here; note this cannot be
        called from inside a running event loop, where async callers should
        use :meth:`_agenerate` instead. Also forwards ``stop``, which was
        previously dropped.
        """
        response = asyncio.run(self._call(messages, stop=stop, **kwargs))
        return ChatResult(
            generations=[ChatGeneration(message=AIMessage(content=response))]
        )

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Async counterpart of :meth:`_generate`; awaits ``_call`` directly."""
        response = await self._call(messages, stop=stop, **kwargs)
        return ChatResult(
            generations=[ChatGeneration(message=AIMessage(content=response))]
        )

    def _convert_messages_to_dict(self, messages: List[BaseMessage]) -> List[dict]:
        """Convert LangChain messages to the API's role/content dict format.

        Raises:
            ValueError: if a message is not a System/AI/Human message.
        """
        message_dicts = []
        for message in messages:
            if isinstance(message, SystemMessage):
                message_dicts.append({"role": "system", "content": message.content})
            elif isinstance(message, AIMessage):
                message_dicts.append({"role": "assistant", "content": message.content})
            elif isinstance(message, HumanMessage):
                message_dicts.append({"role": "user", "content": message.content})
            else:
                raise ValueError(f"Got unknown message type: {message}")
        return message_dicts

    async def _create_session(self) -> None:
        """Create the shared aiohttp session on first use."""
        if self.session is None:
            self.session = aiohttp.ClientSession()

    def _headers(self) -> Dict[str, str]:
        """Authorization and content-type headers for the API."""
        return {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }

    def _request_payload(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]],
        stream: bool,
    ) -> Dict[str, Any]:
        """Build the JSON request body shared by ``_call`` and ``_astream``."""
        data: Dict[str, Any] = {
            "model": self.model_name,
            "messages": self._convert_messages_to_dict(messages),
            "temperature": self.temperature,
            "max_tokens": self.max_tokens,
            "stream": stream,
        }
        if stop:
            data["stop"] = stop
        return data

    async def _call(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call the DeepSeek API and return the assistant's reply text.

        Raises:
            ValueError: on HTTP errors or an unexpected response shape,
                with the original exception chained as the cause.
        """
        await self._create_session()

        try:
            async with self.session.post(
                f"{self.api_base}/chat/completions",
                headers=self._headers(),
                json=self._request_payload(messages, stop, stream=False),
            ) as response:
                # Surface HTTP errors (401, 429, ...) here instead of failing
                # later with a confusing KeyError on the error body.
                response.raise_for_status()
                response_data = await response.json()
                return response_data["choices"][0]["message"]["content"]
        except Exception as e:
            # Chain the cause so the root failure stays visible in tracebacks.
            raise ValueError(f"Error calling DeepSeek API: {str(e)}") from e

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatResult]:
        """Stream the response, yielding one ChatResult per content delta.

        Parses the server-sent-events stream line by line. Malformed chunks
        are reported and skipped (best effort) rather than aborting the
        stream.
        """
        await self._create_session()

        try:
            async with self.session.post(
                f"{self.api_base}/chat/completions",
                headers=self._headers(),
                json=self._request_payload(messages, stop, stream=True),
            ) as response:
                response.raise_for_status()
                async for raw_line in response.content:
                    if not raw_line:
                        continue
                    try:
                        line = raw_line.decode("utf-8").strip()
                        if not line.startswith("data: "):
                            continue
                        line = line[6:]  # strip the "data: " SSE prefix
                        if line == "[DONE]":
                            continue
                        chunk = json.loads(line)
                        choices = chunk.get("choices")
                        if choices and choices[0].get("delta", {}).get("content"):
                            content = choices[0]["delta"]["content"]
                            yield ChatResult(
                                generations=[
                                    ChatGeneration(message=AIMessage(content=content))
                                ]
                            )
                    except Exception as e:
                        # Best effort: skip the malformed chunk, keep streaming.
                        print(f"Error processing stream chunk: {e}")
        except Exception as e:
            raise ValueError(f"Error calling DeepSeek API: {str(e)}") from e

    async def aclose(self) -> None:
        """Close the aiohttp session; safe to call more than once."""
        if self.session:
            await self.session.close()
            self.session = None