import json
import os
from openai import OpenAI
from typing import Optional, Dict, Any, AsyncGenerator
import time
import hmac
import base64
import uuid
import hashlib
import asyncio

class TongYiAPI:
    """Wrapper around OpenAI-compatible chat-completion APIs.

    Supports two providers (Alibaba DashScope compatible mode, labelled
    "通义千问", and the official DeepSeek API), persists credentials in a
    per-provider JSON config file, and exposes helpers that separate a
    reasoning model's chain-of-thought (``reasoning_content``) from its
    final answer.
    """

    # Endpoint URL and known-good model list for each supported provider.
    PROVIDER_CONFIGS = {
        "通义千问": {
            "base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
            "models": [
                "deepseek-r1",
                "deepseek-v3",
                "deepseek-r1-distill-qwen-1.5b",
                "deepseek-r1-distill-qwen-7b",
                "deepseek-r1-distill-qwen-14b",
                "deepseek-r1-distill-qwen-32b",
                "deepseek-r1-distill-llama-8b",
                "deepseek-r1-distill-llama-70b"
            ]
        },
        "DeepSeek": {
            "base_url": "https://api.deepseek.com",
            "models": [
                "deepseek-chat",  # DeepSeek-V3
                "deepseek-reasoner"  # DeepSeek-R1
            ]
        }
    }

    def __init__(self):
        # Credential/selection state; overwritten from disk (if a config
        # file exists) by load_credentials() below.
        self.api_key: Optional[str] = None
        self.model: str = "deepseek-r1"
        self.provider: str = "通义千问"
        self.client = None  # OpenAI client, created lazily by _init_client()
        self.load_credentials()

    def _init_client(self):
        """(Re)create the OpenAI-compatible client for the current provider.

        Raises:
            ValueError: if ``self.provider`` is not in ``PROVIDER_CONFIGS``.
        """
        provider_config = self.PROVIDER_CONFIGS.get(self.provider)
        if not provider_config:
            raise ValueError(f"不支持的服务商: {self.provider}")

        self.client = OpenAI(
            api_key=self.api_key,
            base_url=provider_config["base_url"]
        )

    def save_credentials(self):
        """Persist api_key/model/provider to the per-provider config file."""
        config_file = f'config_{self.provider}.json'  # one file per provider
        config = {
            'api_key': self.api_key,
            'model': self.model,
            'provider': self.provider
        }
        # utf-8 + ensure_ascii=False keep the Chinese provider name readable
        # on disk instead of \uXXXX escapes.
        with open(config_file, 'w', encoding='utf-8') as f:
            json.dump(config, f, ensure_ascii=False)
        if self.api_key:
            self._init_client()

    def load_credentials(self):
        """Best-effort load of saved credentials for the current provider.

        A missing or corrupt config file is treated as "no saved
        credentials" and ignored; it must never crash construction.
        """
        config_file = f'config_{self.provider}.json'
        try:
            if os.path.exists(config_file):
                with open(config_file, 'r', encoding='utf-8') as f:
                    config = json.load(f)
                self.api_key = config.get('api_key')
                self.model = config.get('model', self.model)
                self.provider = config.get('provider', self.provider)
                if self.api_key:
                    self._init_client()
        except (OSError, ValueError, KeyError):
            # ValueError covers json.JSONDecodeError and the unsupported-
            # provider error from _init_client(); previously a bare `except`
            # hid every failure, including programming errors.
            pass

    def _generate_signature(self, params: Dict[str, Any]) -> str:
        """Compute an HMAC-SHA256 signature over *params*.

        Legacy helper for the pre-OpenAI-compatible DashScope HTTP API;
        only used by :meth:`_prepare_request`. Requires ``self.api_key``
        to be set.
        """
        # 1. Sort parameters by name (lexicographic, ascending).
        sorted_params = sorted(params.items())

        # 2. Build the canonicalized query string.
        canonicalized_query_string = '&'.join(
            f"{k}={v}" for k, v in sorted_params
        )

        # 3. Sign with HMAC-SHA256, base64-encode the digest.
        key = self.api_key.encode('utf-8')
        message = canonicalized_query_string.encode('utf-8')
        signature = base64.b64encode(
            hmac.new(key, message, digestmod=hashlib.sha256).digest()
        ).decode('utf-8')

        return signature

    def _prepare_request(self, prompt: str) -> Dict[str, Any]:
        """Build a signed parameter dict for the legacy GenerateText endpoint.

        NOTE(review): not used by the OpenAI-compatible flow in this class;
        kept for callers that still target the raw DashScope HTTP API.
        """
        timestamp = str(int(time.time() * 1000))  # millisecond timestamp
        request_id = str(uuid.uuid4())            # unique id for tracing

        params = {
            "Action": "GenerateText",
            "Model": self.model,
            "Prompt": prompt,
            "ResultFormat": "text",  # plain-text response
            "TopP": 0.8,  # sampling diversity
            "TopK": 50,   # sampling determinism
            "Timestamp": timestamp,
            "Version": "2023-12-25",  # API version
            "RequestId": request_id,
            "AccessKeyId": self.api_key
        }

        # Signature is computed over all parameters above, then appended.
        params["Signature"] = self._generate_signature(params)

        return params

    async def analyze_text_async(self, text: str, prompt: str) -> str:
        """Analyze *text* with *prompt*, returning the full collected result.

        If the model streams a chain-of-thought, the result contains a
        【思考过程】 section followed by 【分析结果】; otherwise it is the
        plain answer text.

        Raises:
            ValueError: if the client has not been initialized.
            Exception: wrapping any error raised while streaming.
        """
        if not self.client:
            raise ValueError("API 凭证未设置")

        try:
            # NOTE(review): the SDK call is synchronous and blocks the event
            # loop while streaming; wrap in asyncio.to_thread if that matters.
            stream = self.client.chat.completions.create(
                model=self.model,
                messages=[
                    {'role': 'user', 'content': f"{prompt}\n\n{text}"}
                ],
                stream=True
            )

            reasoning_content = ""  # accumulated chain-of-thought
            answer_content = ""     # accumulated final answer

            for chunk in stream:
                # Skip usage/keep-alive chunks that carry no choices.
                if not getattr(chunk, 'choices', None):
                    continue

                delta = chunk.choices[0].delta

                # Bug fix: previously any delta lacking a `reasoning_content`
                # attribute was skipped entirely, silently dropping the answer
                # text of non-reasoning models (e.g. deepseek-chat). Collect
                # both fields defensively with getattr instead.
                if getattr(delta, 'reasoning_content', None):
                    reasoning_content += delta.reasoning_content
                elif getattr(delta, 'content', None):
                    answer_content += delta.content

            # Only prepend the reasoning section when one was produced.
            if reasoning_content:
                return f"【思考过程】\n{reasoning_content}\n\n【分析结果】\n{answer_content}"
            return answer_content

        except Exception as e:
            raise Exception(f"分析过程出错: {str(e)}") from e

    async def analyze_text_stream(self, text: str, prompt: str) -> AsyncGenerator[tuple, None]:
        """Stream analysis output as ``(kind, text)`` tuples.

        ``kind`` is ``"reasoning"``, ``"separator"`` or ``"content"``. The
        separator is emitted exactly once, before the first answer chunk,
        and only when a chain-of-thought was actually streamed (consistent
        with :meth:`analyze_text_async`).

        Raises:
            ValueError: if the client has not been initialized.
            Exception: wrapping any error raised while streaming.
        """
        if not self.client:
            raise ValueError("API 凭证未设置")

        try:
            stream = self.client.chat.completions.create(
                model=self.model,
                messages=[
                    {'role': 'user', 'content': f"{prompt}\n\n{text}"}
                ],
                stream=True
            )

            saw_reasoning = False   # any reasoning chunk seen so far
            separator_sent = False  # header already emitted before answer

            for chunk in stream:
                if not getattr(chunk, 'choices', None):
                    continue

                delta = chunk.choices[0].delta

                if getattr(delta, 'reasoning_content', None):
                    saw_reasoning = True
                    yield ("reasoning", delta.reasoning_content)
                elif getattr(delta, 'content', None):
                    # Bug fix: the separator used to be emitted even when the
                    # model produced no reasoning at all.
                    if saw_reasoning and not separator_sent:
                        separator_sent = True
                        yield ("separator", "\n【分析结果】\n")
                    yield ("content", delta.content)

        except Exception as e:
            raise Exception(f"分析过程出错: {str(e)}") from e

    async def test_connection_async(self) -> tuple[bool, str]:
        """Async connectivity check: one tiny analysis round-trip."""
        if not self.api_key:
            return False, "API Key 未设置"

        try:
            self._init_client()
            await self.analyze_text_async(
                "测试",
                "这是一个简短的API测试"
            )
            return True, "连接测试成功"
        except Exception as e:
            return False, f"连接测试失败: {str(e)}"

    def test_connection(self) -> tuple[bool, str]:
        """Sync connectivity check via a minimal chat completion."""
        if not self.api_key:
            return False, "API Key 未设置"

        try:
            self._init_client()
            # A tiny non-streaming request is enough to validate credentials.
            self.client.chat.completions.create(
                model=self.model,
                messages=[
                    {'role': 'user', 'content': 'hi'}
                ],
                max_tokens=10  # keep the probe cheap
            )
            return True, "连接测试成功"
        except Exception as e:
            return False, f"连接测试失败: {str(e)}"