#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import anthropic
import os
import json
import asyncio
from typing import List, Dict, Any, Optional
import base64
from pathlib import Path


# 1. Basic configuration and initialization
class ClaudeAPIClient:
    """Convenience wrapper around the Anthropic SDK.

    Exposes common interaction patterns (single-shot chat, multi-turn chat,
    system prompts, vision, streaming, and async/batch requests). All chat
    methods share one contract: return the reply text, or an error string
    of the form "错误: ..." on failure.
    """

    def __init__(self, api_key: Optional[str] = None):
        """
        Initialize the Claude API client.

        Args:
            api_key: Anthropic API key; if None, it is read from the
                ANTHROPIC_API_KEY environment variable.

        Raises:
            ValueError: if no API key was supplied or found in the environment.
        """
        self.api_key = api_key or os.getenv('ANTHROPIC_API_KEY')
        if not self.api_key:
            # Error message kept verbatim for existing callers/tests.
            raise ValueError("请设置ANTHROPIC_API_KEY环境变量或传入api_key参数")

        # One sync and one async client so both call styles share credentials.
        self.client = anthropic.Anthropic(api_key=self.api_key)
        self.async_client = anthropic.AsyncAnthropic(api_key=self.api_key)

    def simple_chat(self, message: str, model: str = "claude-3-sonnet-20240229") -> str:
        """
        Send a single user message and return the reply.

        Args:
            message: The user message.
            model: Model name to use.

        Returns:
            Claude's reply text, or an error string on failure.
        """
        try:
            response = self.client.messages.create(
                model=model,
                max_tokens=1000,
                messages=[
                    {"role": "user", "content": message}
                ]
            )
            return response.content[0].text
        except Exception as e:
            return f"错误: {str(e)}"

    def multi_turn_chat(self, messages: List[Dict[str, str]],
                        model: str = "claude-3-sonnet-20240229",
                        max_tokens: int = 1000,
                        temperature: float = 0.7) -> str:
        """
        Continue a multi-turn conversation.

        Args:
            messages: Conversation history in the form
                [{"role": "user"/"assistant", "content": "..."}].
            model: Model name to use.
            max_tokens: Maximum number of tokens to generate.
            temperature: Sampling temperature controlling randomness.

        Returns:
            Claude's reply text, or an error string on failure.
        """
        try:
            response = self.client.messages.create(
                model=model,
                max_tokens=max_tokens,
                temperature=temperature,
                messages=messages
            )
            return response.content[0].text
        except Exception as e:
            return f"错误: {str(e)}"

    def chat_with_system_prompt(self, message: str, system_prompt: str,
                                model: str = "claude-3-sonnet-20240229") -> str:
        """
        Chat with a system prompt applied.

        Args:
            message: The user message.
            system_prompt: System prompt steering Claude's behavior.
            model: Model name to use.

        Returns:
            Claude's reply text, or an error string on failure.
        """
        try:
            response = self.client.messages.create(
                model=model,
                max_tokens=1000,
                system=system_prompt,
                messages=[
                    {"role": "user", "content": message}
                ]
            )
            return response.content[0].text
        except Exception as e:
            return f"错误: {str(e)}"

    def chat_with_image(self, message: str, image_path: str,
                        model: str = "claude-3-sonnet-20240229") -> str:
        """
        Send an image together with a text message.

        Args:
            message: The text message accompanying the image.
            image_path: Path to the image file on disk.
            model: Model name to use.

        Returns:
            Claude's reply text, or an error string on failure
            (including when the file cannot be read).
        """
        try:
            # Read and base64-encode the image payload.
            image_data = base64.b64encode(Path(image_path).read_bytes()).decode()

            # Map the file extension to a MIME media type; default to JPEG.
            media_type_map = {
                '.jpg': 'image/jpeg',
                '.jpeg': 'image/jpeg',
                '.png': 'image/png',
                '.gif': 'image/gif',
                '.webp': 'image/webp'
            }
            media_type = media_type_map.get(Path(image_path).suffix.lower(), 'image/jpeg')

            response = self.client.messages.create(
                model=model,
                max_tokens=1000,
                messages=[
                    {
                        "role": "user",
                        "content": [
                            {
                                "type": "image",
                                "source": {
                                    "type": "base64",
                                    "media_type": media_type,
                                    "data": image_data
                                }
                            },
                            {
                                "type": "text",
                                # BUG FIX: the Messages API requires the key
                                # "text" in a text content block; the original
                                # used "content", which the API rejects.
                                "text": message
                            }
                        ]
                    }
                ]
            )
            return response.content[0].text
        except Exception as e:
            return f"错误: {str(e)}"

    def streaming_chat(self, message: str, model: str = "claude-3-sonnet-20240229"):
        """
        Stream a reply, yielding text incrementally as it arrives.

        Args:
            message: The user message.
            model: Model name to use.

        Yields:
            Successive chunks of the reply text; a single error string
            is yielded on failure.
        """
        try:
            with self.client.messages.stream(
                    model=model,
                    max_tokens=1000,
                    messages=[
                        {"role": "user", "content": message}
                    ]
            ) as stream:
                for text in stream.text_stream:
                    yield text
        except Exception as e:
            yield f"错误: {str(e)}"

    async def async_chat(self, message: str, model: str = "claude-3-sonnet-20240229") -> str:
        """
        Asynchronous single-message chat.

        Args:
            message: The user message.
            model: Model name to use.

        Returns:
            Claude's reply text, or an error string on failure.
        """
        try:
            response = await self.async_client.messages.create(
                model=model,
                max_tokens=1000,
                messages=[
                    {"role": "user", "content": message}
                ]
            )
            return response.content[0].text
        except Exception as e:
            return f"错误: {str(e)}"

    async def batch_async_chat(self, messages_list: List[str],
                               model: str = "claude-3-sonnet-20240229") -> List[str]:
        """
        Process several independent messages concurrently.

        Args:
            messages_list: List of user messages.
            model: Model name to use.

        Returns:
            Replies in the same order as the input messages
            (asyncio.gather preserves order).
        """
        tasks = [self.async_chat(msg, model) for msg in messages_list]
        return await asyncio.gather(*tasks)


# 2. Usage examples
def main():
    """Walk through the basic synchronous Claude API usage examples."""
    client = ClaudeAPIClient()
    model = 'claude-sonnet-4-20250514'

    print("=== Claude API Python示例 ===\n")

    # Example 1: single-shot chat
    print("1. 简单对话示例:")
    reply = client.simple_chat("你好，请用中文介绍一下自己", model=model)
    print(f"Claude: {reply}\n")

    # Example 2: multi-turn conversation with prior history
    print("2. 多轮对话示例:")
    history = [
        {"role": "user", "content": "我想学习Python编程"},
        {"role": "assistant", "content": "太好了！Python是一门非常适合初学者的编程语言。你想从哪个方面开始学习呢？"},
        {"role": "user", "content": "请给我一个简单的代码示例"},
    ]
    reply = client.multi_turn_chat(history, model=model)
    print(f"Claude: {reply}\n")

    # Example 3: steer the assistant with a system prompt
    print("3. 带系统提示的对话:")
    tutor_prompt = "你是一个专业的Python编程导师，请用简洁明了的方式回答问题。"
    reply = client.chat_with_system_prompt("什么是列表推导式？", tutor_prompt, model=model)
    print(f"Claude: {reply}\n")

    # Example 4: streaming — print chunks as they arrive
    print("4. 流式响应示例:")
    print("Claude: ", end="", flush=True)
    for piece in client.streaming_chat("写一首关于编程的短诗", model=model):
        print(piece, end="", flush=True)
    print("\n")

    # Example 5 (image analysis) needs a local image file; see
    # ClaudeAPIClient.chat_with_image for the vision call.


# 3. Async processing examples
async def async_examples():
    """Demonstrate single and batched asynchronous requests."""
    client = ClaudeAPIClient()

    print("=== 异步处理示例 ===\n")

    # One awaited request
    print("1. 异步单个请求:")
    answer = await client.async_chat("用一句话解释什么是机器学习")
    print(f"Claude: {answer}\n")

    # Several questions dispatched concurrently; replies come back in order
    print("2. 批量异步请求:")
    questions = [
        "什么是人工智能？",
        "Python有哪些优势？",
        "如何学习数据科学？",
    ]
    answers = await client.batch_async_chat(questions)
    for idx, (question, answer) in enumerate(zip(questions, answers), start=1):
        print(f"问题{idx}: {question}")
        print(f"Claude: {answer}\n")


# 4. Utility helper functions
class ClaudeUtils:
    """Stateless helper utilities for working with Claude conversations."""

    @staticmethod
    def count_tokens_estimate(text: str) -> int:
        """
        Roughly estimate the token count of *text*.

        Heuristic only (the official tokenizer is needed for exact counts):
        ~4 non-CJK characters per token, ~1.5 CJK characters per token.

        Args:
            text: Text to estimate.

        Returns:
            Estimated token count.
        """
        cjk = sum(1 for ch in text if '\u4e00' <= ch <= '\u9fff')
        rest = len(text) - cjk
        return int(cjk / 1.5 + rest / 4)

    @staticmethod
    def save_conversation(conversation: List[Dict], filename: str):
        """Serialize a conversation history to a JSON file."""
        with open(filename, 'w', encoding='utf-8') as fp:
            json.dump(conversation, fp, ensure_ascii=False, indent=2)

    @staticmethod
    def load_conversation(filename: str) -> List[Dict]:
        """Deserialize a conversation history from a JSON file."""
        with open(filename, 'r', encoding='utf-8') as fp:
            return json.load(fp)


# 5. Interactive chatbot
def interactive_chat():
    """Run a REPL-style chat session that keeps the conversation history."""
    client = ClaudeAPIClient()
    history = []

    print("=== Claude 交互式聊天 ===")
    print("输入 'quit' 退出，输入 'clear' 清空对话历史\n")

    while True:
        text = input("你: ").strip()
        command = text.lower()

        # Control commands first, then skip empty input
        if command == 'quit':
            print("再见！")
            return
        if command == 'clear':
            history = []
            print("对话历史已清空")
            continue
        if not text:
            continue

        # Record the user's turn, fetch the reply, record the assistant's turn
        history.append({"role": "user", "content": text})
        answer = client.multi_turn_chat(history)
        history.append({"role": "assistant", "content": answer})

        print(f"Claude: {answer}\n")


if __name__ == "__main__":
    # SECURITY FIX: a live-looking Anthropic API key was previously
    # hard-coded here. Never commit secrets to source control; supply the
    # key via the environment instead, e.g.:
    #   export ANTHROPIC_API_KEY="sk-ant-..."
    # ClaudeAPIClient reads ANTHROPIC_API_KEY from the environment itself.

    # Run the basic synchronous examples
    main()

    # Run the async examples
    # asyncio.run(async_examples())

    # Run the interactive chat (uncomment to enable)
    # interactive_chat()