import boto3, os, tempfile
from confluent_kafka import Producer
from dataclasses import dataclass, asdict
from urllib.parse import urljoin
from typing import Optional
import json

from config import OSS_AK, OSS_BUCKET, OSS_ENDPOINT, OSS_SK, KAFKA_BROKERS, KAFKA_TOPIC,KAFKA_MECHANISM,KAFKA_USERNAME,KAFKA_PASSWORD

@dataclass
class Payload:
    # One captured Telegram group message, serialized to JSON and published
    # to Kafka by Uploader.send().
    tel_phone_number: str  # phone number of the account that captured the message
    group_id: int          # Telegram group id; used as the Kafka message key in Uploader.send()
    group_name: str        # human-readable group title
    sender_name: str       # display name of the message sender
    sender_id: int         # Telegram user id of the sender
    send_at: str           # send timestamp as a string (exact format not visible here — confirm with producer)
    content: str           # message text content
    media_type: str        # type tag of any attached media (values not visible here — confirm with producer)


class Uploader:
    """Uploads media files to OSS-compatible object storage and publishes
    Telegram message payloads to a Kafka topic.

    Any constructor argument left as ``None`` falls back to the matching
    value from the ``config`` module. The S3 client is currently a Todo:
    uploads are simulated and only the would-be public URL is produced.
    """

    def __init__(self,
                 endpoint_url: Optional[str] = None,
                 aws_access_key_id: Optional[str] = None,
                 aws_secret_access_key: Optional[str] = None,
                 bucket_name: Optional[str] = None,
                 base_url: Optional[str] = None,
                 kafka_bootstrap_servers: Optional[str] = None,
                 kafka_topic: Optional[str] = None,
                 kafka_mechanism: Optional[str] = None,
                 kafka_username: Optional[str] = None,
                 kafka_password: Optional[str] = None,
                 ) -> None:
        print(f"[DEBUG] 进入函数: Uploader.__init__")
        # Prefer explicit arguments; fall back to config-module values.
        self.endpoint_url = endpoint_url or OSS_ENDPOINT
        self.aws_access_key_id = aws_access_key_id or OSS_AK
        self.aws_secret_access_key = aws_secret_access_key or OSS_SK
        self.bucket_name = bucket_name or OSS_BUCKET
        # _generate_base_url reads endpoint_url/bucket_name, so it must run
        # after those attributes are assigned.
        self.base_url = base_url or self._generate_base_url()
        self.kafka_bootstrap_servers = kafka_bootstrap_servers or KAFKA_BROKERS
        self.kafka_topic = kafka_topic or KAFKA_TOPIC
        self.kafka_mechanism = kafka_mechanism or KAFKA_MECHANISM
        self.kafka_username = kafka_username or KAFKA_USERNAME
        self.kafka_password = kafka_password or KAFKA_PASSWORD

        # S3 client initialization is still a Todo. Keep the attribute
        # defined so generate_presigned_url can raise a clear error instead
        # of an AttributeError (it previously always crashed with one).
        self.s3 = None
        # Todo
        # self.s3 = boto3.client(
        #     "s3",
        #     endpoint_url=self.endpoint_url,
        #     aws_access_key_id=self.aws_access_key_id,
        #     aws_secret_access_key=self.aws_secret_access_key
        # )

        # Initialize the Kafka producer (raises ConnectionError on failure).
        self.kafka = self._init_kafka_producer()

    def _init_kafka_producer(self) -> "Producer":
        """Create and return a confluent-kafka Producer.

        Raises:
            ConnectionError: if the Producer object cannot be created.
        """
        print(f"[DEBUG] 进入函数: Uploader._init_kafka_producer")
        try:
            conf = {
                'bootstrap.servers': self.kafka_bootstrap_servers,
                'client.id': 'telegram-uploader-producer',
                'message.max.bytes': 10000000,  # 10 MB maximum message size
                'compression.type': 'lz4',       # enable compression
                'security.protocol': 'SASL_PLAINTEXT',  # SASL auth over plaintext
                'sasl.mechanism': self.kafka_mechanism,
                'sasl.username': self.kafka_username,
                'sasl.password': self.kafka_password,
            }
            producer = Producer(conf)
            print(f"✅ Kafka Producer 创建成功，连接到: {self.kafka_bootstrap_servers}")
            return producer
        except Exception as e:
            print(f"❌ 无法连接到 Kafka: {e}")
            print("请确保 Kafka 服务正在运行")
            # Chain the cause so the original error is preserved in tracebacks.
            raise ConnectionError(f"Kafka连接失败: {e}") from e

    def _delivery_report(self, err, msg):
        """Kafka delivery callback: log success or failure of one message."""
        print(f"[DEBUG] 进入函数: Uploader._delivery_report")
        if err is not None:
            print(f"❌ 消息发送失败: {err}")
        else:
            print(f"✅ 消息已发送：topic={msg.topic()}, partition={msg.partition()}, offset={msg.offset()}")

    def _generate_base_url(self) -> str:
        """Build the public base URL for uploaded objects."""
        print(f"[DEBUG] 进入函数: Uploader._generate_base_url")
        if self.endpoint_url.startswith('http'):
            # Full endpoint URL given: use path-style addressing.
            return f"{self.endpoint_url.rstrip('/')}/{self.bucket_name}/"
        else:
            # Bare host given: build a virtual-hosted-style S3 URL.
            return f"https://{self.bucket_name}.{self.endpoint_url}/"

    def upload_media(self, local_path: str, key_prefix: str = 'tg/') -> str:
        """Upload a media file to OSS (the upload itself is still a Todo;
        only the would-be public URL is computed).

        Args:
            local_path: local file path
            key_prefix: storage path prefix inside the bucket

        Returns:
            The public access URL for the object.
        """
        print(f"[DEBUG] 进入函数: Uploader.upload_media")
        filename = os.path.basename(local_path)
        # Bug fix: the key previously embedded the literal "(unknown)" and
        # `filename` was never used. NOTE(review): keys are not guaranteed
        # unique — two files with the same basename collide; consider adding
        # a uuid/timestamp component if that matters.
        key = f'{key_prefix.rstrip("/")}/{filename}'

        # Upload the file
        # Todo
        # self.s3.upload_file(local_path, self.bucket_name, key)
        res = urljoin(self.base_url, key)
        print(f"[DEBUG] upload_media: 模拟文件上传完成返回的路径: {res}")
        # Return the full access URL.
        return res

    def generate_presigned_url(self, key: str, expiration: int = 3600) -> str:
        """Generate a presigned URL for temporary access to a private object.

        Args:
            key: OSS object key
            expiration: URL lifetime in seconds

        Returns:
            The presigned URL.

        Raises:
            RuntimeError: if the S3 client has not been initialized
                (client creation in __init__ is still a Todo).
        """
        print(f"[DEBUG] 进入函数: Uploader.generate_presigned_url")
        # Bug fix: self.s3 was never assigned, so this method previously
        # died with an opaque AttributeError on every call.
        if self.s3 is None:
            raise RuntimeError("S3 client is not initialized (see Todo in __init__)")
        return self.s3.generate_presigned_url(
            'get_object',
            Params={'Bucket': self.bucket_name, 'Key': key},
            ExpiresIn=expiration
        )

    def send(self, p: "Payload") -> bool:
        """Serialize *p* as JSON and publish it to the configured Kafka topic.

        Args:
            p: the Payload dataclass instance to publish

        Returns:
            True if the message was produced and flushed, False otherwise.
        """
        print(f"[DEBUG] 进入函数: Uploader.send")
        print(f"[DEBUG] 展示要上传的参数: {p}")

        if self.kafka is None:
            print("❌ Kafka Producer 未初始化")
            return False

        try:
            # Convert the dataclass to a plain dict, then to a JSON byte string.
            message_dict = asdict(p)
            message_value = json.dumps(message_dict, ensure_ascii=False).encode('utf-8')

            # Partition by group_id so messages of one group keep their order.
            message_key = str(message_dict.get("group_id", "")).encode('utf-8')
            print(f"[DEBUG] 展示转换的json字符串: {message_value}")
            self.kafka.produce(
                topic=self.kafka_topic,
                key=message_key,
                value=message_value,
                callback=self._delivery_report
            )

            # Flush immediately so delivery (or failure) is known before returning.
            self.kafka.flush(timeout=5.0)

            print(f"✅ 消息已发送到Kafka topic: {self.kafka_topic}")
            return True

        except Exception as e:
            print(f"❌ 发送消息到Kafka失败: {e}")
            return False

    def close(self):
        """Flush buffered Kafka messages; the Producer needs no explicit close."""
        print(f"[DEBUG] 进入函数: Uploader.close")
        if self.kafka:
            self.kafka.flush(timeout=10.0)