import os
import time
import json
from typing import Dict, List, Optional

# Install the required third-party libraries (uncomment when actually running):
# !pip install openai anthropic requests

class ModelAPI:
    """
    Unified calling interface for multiple chat-model providers.

    Supported backends: OpenAI GPT, Anthropic Claude, Baidu ERNIE (Wenxin
    Yiyan), and iFlytek Spark.  Conversation history accumulates on the
    instance and is replayed to the backend on every call.
    """

    def __init__(self):
        # API keys come from environment variables so they never live in code.
        self.api_keys = {
            "openai": os.getenv("OPENAI_API_KEY"),
            "anthropic": os.getenv("ANTHROPIC_API_KEY"),
            "baidu": os.getenv("BAIDU_API_KEY"),
            "iflytek": os.getenv("IFLYTEK_API_KEY"),
        }
        # Conversation history: list of {"role": ..., "content": ...} dicts.
        # Assistant turns are stored with role "ai" (see call_model) and
        # normalized to "assistant" by _format_history before any API call.
        self.history: List[dict] = []

    def _call_openai(self, model: str, prompt: str) -> str:
        """Send the prompt plus replayed history to OpenAI chat completions."""
        from openai import OpenAI

        client = OpenAI(api_key=self.api_keys["openai"])
        response = client.chat.completions.create(
            model=model,
            messages=self._format_history() + [{"role": "user", "content": prompt}],
            temperature=0.7,
            max_tokens=1000,
        )
        return response.choices[0].message.content.strip()

    def _call_anthropic(self, model: str, prompt: str) -> str:
        """Send the prompt plus replayed history to the Anthropic Messages API."""
        from anthropic import Anthropic

        client = Anthropic(api_key=self.api_keys["anthropic"])
        history = self._format_history(service="anthropic")
        # Anthropic rejects "system" entries inside `messages`; system text
        # must be passed through the dedicated `system` parameter instead.
        system_parts = [m["content"] for m in history if m["role"] == "system"]
        messages = [m for m in history if m["role"] != "system"]
        extra = {"system": "\n".join(system_parts)} if system_parts else {}
        message = client.messages.create(
            model=model,
            messages=messages + [{"role": "user", "content": prompt}],
            max_tokens=1000,
            **extra,
        )
        return message.content[0].text.strip()

    def _call_baidu(self, model: str, prompt: str) -> str:
        """Send the prompt plus replayed history to the Baidu ERNIE API."""
        import requests

        url = (
            "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions?access_token="
            + self.api_keys["baidu"]
        )
        payload = json.dumps({
            "messages": self._format_history() + [{"role": "user", "content": prompt}]
        })
        headers = {'Content-Type': 'application/json'}

        # A timeout keeps a network hiccup from hanging the call forever.
        response = requests.post(url, headers=headers, data=payload, timeout=30)
        data = response.json()
        if "result" not in data:
            # Baidu reports failures as {"error_code": ..., "error_msg": ...};
            # raising here routes the problem through call_model's handler
            # instead of crashing with a bare KeyError.
            raise RuntimeError(
                f"Baidu API error {data.get('error_code')}: {data.get('error_msg')}"
            )
        return data["result"]

    def _call_iflytek(self, model: str, prompt: str) -> str:
        """Placeholder for the iFlytek Spark backend."""
        # Simplified stand-in: real usage requires the official Spark SDK;
        # this only demonstrates the dispatch flow.
        return f"[讯飞星火] 已收到: {prompt}"

    def _format_history(self, service: str = "standard") -> List[dict]:
        """
        Convert the stored history into API-ready message dicts.

        Assistant turns are stored internally with role "ai"; every supported
        backend expects "assistant", so the role is normalized unconditionally
        (previously only the Anthropic path converted it, which made OpenAI and
        Baidu receive the invalid role "ai").  The ``service`` parameter is
        kept for backward compatibility and no longer affects the output.
        """
        formatted = []
        for entry in self.history:
            role = "assistant" if entry["role"] == "ai" else entry["role"]
            formatted.append({"role": role, "content": entry["content"]})
        return formatted

    def call_model(
        self,
        model_type: str,
        model_name: str,
        prompt: str,
        system_prompt: Optional[str] = None
    ) -> str:
        """
        Unified model-calling entry point.

        :param model_type: backend identifier (openai/anthropic/baidu/iflytek)
        :param model_name: concrete model name for that backend
        :param prompt: user input
        :param system_prompt: optional system-level instruction
        :return: the model's response text, or an error-description string
        """
        if system_prompt:
            # Replace (rather than stack) any existing system entry: the old
            # unconditional insert(0, ...) accumulated a duplicate system
            # message on every call that supplied one.
            self.history = [e for e in self.history if e["role"] != "system"]
            self.history.insert(0, {"role": "system", "content": system_prompt})

        # Record the user turn.
        self.history.append({"role": "user", "content": prompt})

        # Dispatch table keeps the backend selection flat and extensible.
        dispatch = {
            "openai": self._call_openai,
            "anthropic": self._call_anthropic,
            "baidu": self._call_baidu,
            "iflytek": self._call_iflytek,
        }
        try:
            handler = dispatch.get(model_type)
            if handler is None:
                raise ValueError(f"不支持的模型类型: {model_type}")
            response = handler(model_name, prompt)
        except Exception as e:
            # Roll back the unanswered user turn so a failed call does not
            # corrupt the transcript replayed on the next attempt.
            self.history.pop()
            return f"API调用错误: {str(e)}"

        # Record the assistant turn (stored as "ai"; see _format_history).
        self.history.append({"role": "ai", "content": response})
        return response

    def clear_history(self):
        """Discard the accumulated conversation history."""
        self.history = []

class ChatClient:
    """Interactive client that tracks which backend/model pair is active."""

    def __init__(self):
        # Backend facade shared by every conversation in this client.
        self.model_api = ModelAPI()
        # Currently selected backend and model; OpenAI is the default.
        self.current_model = {"type": "openai", "name": "gpt-3.5-turbo"}
        # Catalog of selectable models, keyed by backend type.
        self.model_options = {
            "openai": ["gpt-3.5-turbo", "gpt-4", "gpt-4-turbo"],
            "anthropic": ["claude-3-opus-20240229", "claude-3-sonnet-20240229"],
            "baidu": ["ERNIE-Bot-4"],
            "iflytek": ["Spark-v3.5"],
        }

    def switch_model(self, model_type: str, model_name: str):
        """Select a new active model after validating it against the catalog."""
        available = self.model_options.get(model_type)
        if available is None:
            print(f"无效的模型类型，可选项: {', '.join(self.model_options.keys())}")
            return
        if model_name not in available:
            print(f"该类型下无效的模型名称，可选项: {', '.join(available)}")
            return
        self.current_model = {"type": model_type, "name": model_name}
        print(f"✓ 已切换到: {model_type}/{model_name}")