import os
import requests
import json
from typing import Generator

class DeepSeekAPI:
    """Minimal client for the DeepSeek chat-completion HTTP API."""

    def __init__(self, api_key: str = None):
        """
        Initialize the API client.

        :param api_key: explicit API key; when omitted, the
            DEEPSEEK_API_KEY environment variable is used
            (priority: argument > environment variable).
        :raises ValueError: if no key is available from either source.
        """
        # Bug fix: this previously read os.getenv("API_KEY"), contradicting
        # both the docstring and the error message below.
        self.api_key = api_key or os.getenv("DEEPSEEK_API_KEY")
        if not self.api_key:
            raise ValueError("未找到API密钥，请通过参数传入或设置环境变量DEEPSEEK_API_KEY")

        self.base_url = "https://api.deepseek.com"
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "Accept": "application/json"
        }

    def chat_completion(
        self,
        prompt: str,
        model: str = "deepseek-chat",
        temperature: float = 0.7,
        max_tokens: int = 1000,
        stream: bool = False
    ) -> Generator[str, None, None]:
        """
        Run a chat completion (optionally streamed).

        :param prompt: user prompt text.
        :param model: model name, e.g. deepseek-chat / deepseek-coder.
        :param temperature: sampling temperature (0.0-1.0).
        :param max_tokens: maximum number of tokens to generate.
        :param stream: enable server-sent-event streaming.
        :return: iterator over generated text fragments (empty on error).
        """
        endpoint = f"{self.base_url}/chat/completions"
        payload = {
            "model": model,
            "messages": [{"role": "user", "content": prompt}],
            "temperature": temperature,
            "max_tokens": max_tokens,
            "stream": stream
        }

        try:
            response = requests.post(
                endpoint,
                headers=self.headers,
                json=payload,
                stream=stream,
                timeout=10
            )
            response.raise_for_status()

            if stream:
                return self._handle_stream_response(response)
            return self._handle_normal_response(response)

        except requests.exceptions.RequestException as e:
            # Best-effort error reporting: callers receive an empty iterator
            # instead of an exception.
            print(f"API请求失败: {str(e)}")
            return iter([])

    def _handle_normal_response(self, response: "requests.Response") -> Generator[str, None, None]:
        """Yield the full message content of a non-streaming response."""
        try:
            data = response.json()
            if 'choices' in data and len(data['choices']) > 0:
                content = data['choices'][0]['message']['content']
                yield content
            else:
                yield "未收到有效响应"
        except json.JSONDecodeError:
            yield "响应解析失败"

    def _handle_stream_response(self, response: "requests.Response") -> Generator[str, None, None]:
        """
        Yield text segments from a server-sent-event stream.

        Deltas are buffered and emitted once newline or sentence-ending
        punctuation arrives, with a final flush of any remainder.

        Bug fix: the previous version yielded each delta immediately AND
        later yielded the buffer the delta had been accumulated into,
        so every segment was delivered twice to consumers.
        """
        buffer = ""
        for line in response.iter_lines():
            if not line:
                continue
            decoded_line = line.decode('utf-8')
            if not decoded_line.startswith('data:'):
                continue
            data = decoded_line[5:].strip()
            if data == "[DONE]":  # explicit end-of-stream sentinel
                break
            try:
                chunk = json.loads(data)
            except json.JSONDecodeError:
                continue  # skip malformed / keep-alive lines
            choices = chunk.get('choices') or []
            if not choices:
                continue
            delta = choices[0].get('delta', {}).get('content', '')
            if not delta:
                continue
            buffer += delta
            # Segment on punctuation; .strip() intentionally drops the
            # boundary whitespace that triggered the segment.
            if any(punct in delta for punct in ('\n', '。', '!', '?')):
                yield buffer.strip()
                buffer = ""
        # Flush whatever remains after the stream ends.
        if buffer.strip():
            yield buffer.strip()

# Usage examples ==========================================================
if __name__ == "__main__":
    # Build a client; the API key is taken from the environment.
    client = DeepSeekAPI()

    # Example 1: non-streaming request — iterate the (single-item) result.
    print("普通模式响应：")
    for answer in client.chat_completion("解释量子计算的基本原理", stream=False):
        print(answer)

    # Example 2: streaming request — echo chunks live, then show the whole text.
    print("\n流式模式响应：")
    collected = []
    for piece in client.chat_completion("用Python实现快速排序算法", stream=True):
        print(piece, end='', flush=True)
        collected.append(piece)
    print("\n完整响应：", ''.join(collected))