import aiohttp
import os
import json
from dotenv import load_dotenv
from typing import AsyncGenerator

load_dotenv()


class LLMService:
    """Async client for an OpenAI-compatible chat-completions API.

    Connection settings come from environment variables (loaded via
    dotenv at module import time):

    - ``LOCAL_MODEL_URL``: base URL or full endpoint of the API
    - ``LOCAL_MODEL_KEY``: bearer token for the Authorization header
    - ``LOCAL_MODEL_NAME``: model identifier sent with each request
    """

    def __init__(self):
        self.base_url = os.getenv("LOCAL_MODEL_URL", "http://localhost:3000")
        # SECURITY NOTE(review): a real-looking API key is committed here as
        # the fallback default. This key should be rotated and the default
        # removed so only the LOCAL_MODEL_KEY env var supplies credentials.
        self.api_key = os.getenv("LOCAL_MODEL_KEY", "sk-45ad727f01974b8ba76acb550f0a0040")
        self.model = os.getenv("LOCAL_MODEL_NAME", "deepseek-r1:7b")

    def _build_url(self) -> str:
        """Resolve the chat-completions endpoint from the configured base URL.

        Accepts, in priority order: a full endpoint URL; any URL already
        containing ``/api/`` (assumed complete); an OpenAI-style ``/v1``
        base (gets ``/chat/completions`` appended); or a bare host, to
        which the standard ``/api/chat/completions`` path is appended.
        """
        if self.base_url.endswith("/api/chat/completions"):
            return self.base_url
        if "/api/" in self.base_url:
            return self.base_url
        if "/v1" in self.base_url:
            return f"{self.base_url}/chat/completions"
        return f"{self.base_url.rstrip('/')}/api/chat/completions"

    def _headers(self) -> dict:
        """JSON content-type plus bearer-auth headers used on every request."""
        return {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}"
        }

    @staticmethod
    def _parse_question_list(content: str):
        """Extract a 5-element question list from model output, or None.

        Tries the first ``[...]`` span (models often wrap JSON in prose),
        then the whole reply. Returns the list only when it parses as a
        JSON array of exactly five items.
        """
        candidates = []
        start = content.find('[')
        end = content.rfind(']')
        # NOTE: original code did `rfind(']') + 1` then compared to -1,
        # which treated a missing ']' as found; `end > start` fixes that.
        if start != -1 and end > start:
            candidates.append(content[start:end + 1])
        candidates.append(content)
        for text in candidates:
            try:
                questions = json.loads(text)
            except json.JSONDecodeError:
                continue
            if isinstance(questions, list) and len(questions) == 5:
                return questions
        return None

    async def stream_chat(self, messages: list) -> AsyncGenerator[str, None]:
        """Stream assistant tokens for *messages* via SSE.

        Yields content deltas as they arrive. On HTTP or transport
        errors, yields a single error string instead of raising, so
        callers can render it directly as chat output.
        """
        try:
            data = {
                "model": self.model,
                "messages": messages,
                "stream": True,
                "temperature": 0.7,
                "max_tokens": 2048
            }
            # No total timeout: streams can legitimately run long. Bound
            # the connect and per-read waits instead so a dead server
            # cannot hang the generator forever.
            timeout = aiohttp.ClientTimeout(total=None, sock_connect=30, sock_read=300)
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.post(self._build_url(), json=data, headers=self._headers()) as response:
                    if response.status != 200:
                        error_text = await response.text()
                        yield f"错误: HTTP {response.status} - {error_text}"
                        return
                    async for line in response.content:
                        if not line:
                            continue
                        line_text = line.decode('utf-8').strip()
                        if not line_text.startswith('data: '):
                            continue
                        json_str = line_text[6:]  # strip the SSE 'data: ' prefix
                        if json_str == '[DONE]':
                            break
                        try:
                            chunk_data = json.loads(json_str)
                        except json.JSONDecodeError:
                            # Skip keep-alives / non-JSON frames.
                            continue
                        choices = chunk_data.get('choices') or []
                        if choices:
                            content = choices[0].get('delta', {}).get('content')
                            if content is not None:
                                yield content
        except Exception as e:
            # Surface transport-level failures in-stream rather than raising.
            yield f"错误: {str(e)}"

    async def generate_questions(self, user_input: str) -> list:
        """Return five follow-up questions related to *user_input*.

        Asks the model for a JSON array of five questions. If the call
        fails or the reply cannot be parsed as a 5-element list, falls
        back to templated default questions so callers always get a list.
        """
        try:
            system_prompt = {
                "role": "system",
                "content": "你是一个智能助手，负责根据用户输入的问题生成5个相关的推荐问题。请只返回JSON格式的数组，不要有其他内容。"
            }
            user_prompt = {
                "role": "user",
                "content": f"根据以下问题生成5个相关的推荐问题，以JSON数组格式返回：{user_input}"
            }
            data = {
                "model": self.model,
                "messages": [system_prompt, user_prompt],
                "stream": False,
                "temperature": 0.7,
                "max_tokens": 500
            }
            # Non-streaming request: bound the total wall-clock wait.
            timeout = aiohttp.ClientTimeout(total=60)
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.post(self._build_url(), json=data, headers=self._headers()) as response:
                    if response.status == 200:
                        result = await response.json()
                        choices = result.get('choices') or []
                        if choices:
                            content = choices[0]['message']['content']
                            questions = self._parse_question_list(content)
                            if questions is not None:
                                return questions
                    else:
                        error_text = await response.text()
                        print(f"API请求失败: {response.status} - {error_text}")

            # API call failed or reply was unparseable: templated defaults.
            return [
                f"关于{user_input}的更多信息",
                f"{user_input}的具体应用场景",
                f"如何更好地理解{user_input}",
                f"{user_input}的相关技术",
                f"{user_input}的发展趋势"
            ]

        except Exception as e:
            print(f"生成推荐问题时出错: {e}")
            return [
                "这个问题很有趣，能详细说说吗？",
                "您想了解这个问题的哪个方面？",
                "关于这个话题，您还有其他疑问吗？",
                "我可以帮您深入分析这个问题",
                "您是否想了解相关的技术细节？"
            ]


# Module-level singleton: one shared client instance, created at import time
# (reads its configuration from the environment loaded by dotenv above).
llm_service = LLMService()