"""
消息生产API
"""
from typing import Optional, Dict, Any, List
from fastapi import APIRouter, HTTPException, Depends
from pydantic import BaseModel, Field, validator
import orjson
import asyncio
from datetime import datetime

from app.core.kafka_client import get_kafka_client
from app.utils.logger import logger

# All producer endpoints are mounted under /api/producer and grouped
# under the "消息生产" (message production) tag in the OpenAPI docs.
router = APIRouter(prefix="/api/producer", tags=["消息生产"])


class MessageProduceRequest(BaseModel):
    """Request payload for producing a single message to a Kafka topic."""
    topic: str = Field(..., description="目标Topic")
    key: Optional[str] = Field(None, description="消息Key")
    value: str = Field(..., description="消息内容")
    headers: Optional[Dict[str, str]] = Field(default_factory=dict, description="消息头")
    partition: Optional[int] = Field(None, description="指定分区")
    message_format: str = Field(default="json", description="消息格式：json, text, avro")

    @validator('message_format')
    def validate_format(cls, v):
        # Only the three supported serialization formats are accepted.
        if v in ('json', 'text', 'avro'):
            return v
        raise ValueError('消息格式必须是 json, text, avro 之一')


class BatchMessageProduceRequest(BaseModel):
    """Request payload for producing a batch of messages to one topic."""
    topic: str = Field(..., description="目标Topic")
    messages: List[Dict[str, Any]] = Field(..., description="消息列表")
    message_format: str = Field(default="json", description="消息格式")

    @validator('messages')
    def validate_messages(cls, v):
        # Reject empty batches and cap a single request at 1000 messages.
        if not v:
            raise ValueError('消息列表不能为空')
        count = len(v)
        if count > 1000:
            raise ValueError('单次最多发送1000条消息')
        return v


class MessageProduceResponse(BaseModel):
    """Response for a single produce attempt (also used per-item in batches)."""
    success: bool  # True when the broker acknowledged the write
    message: str  # human-readable status text (Chinese)
    topic: str  # topic the message was addressed to
    partition: Optional[int] = None  # assigned partition; None on failure
    offset: Optional[int] = None  # record offset; None on failure
    timestamp: Optional[int] = None  # broker timestamp in ms; None on failure


class BatchMessageProduceResponse(BaseModel):
    """Aggregate response for a batch produce request."""
    success: bool  # True only when every message in the batch succeeded
    message: str  # human-readable summary text (Chinese)
    total_count: int  # number of messages in the request
    success_count: int  # messages acknowledged by the broker
    failed_count: int  # messages that raised during send
    results: List[MessageProduceResponse]  # per-message outcome, in request order


@router.post("/send", response_model=MessageProduceResponse)
async def send_message(request: MessageProduceRequest):
    """
    Send a single message to a Kafka topic.

    Validates/serializes the payload, encodes key/value to bytes,
    optionally targets a specific partition, and waits for broker
    acknowledgement before responding.

    Raises:
        HTTPException: 500 when serialization or the produce call fails.
    """
    try:
        kafka_client = get_kafka_client()

        # 获取生产者
        producer = await kafka_client.get_producer()

        # 准备消息内容 - 转换为bytes
        message_value_str = await _prepare_message_value(request.value, request.message_format)
        message_value = message_value_str.encode('utf-8')
        message_key = request.key.encode('utf-8') if request.key else None

        # aiokafka expects headers as a list of (str, bytes) tuples: keys stay
        # str, only values are encoded. NOTE(review): headers were previously
        # disabled over "encoding problems" — the old code encoded keys too,
        # which is the likely cause; confirm against the aiokafka version in use.
        headers = None
        if request.headers:
            headers = [(k, v.encode('utf-8')) for k, v in request.headers.items()]

        # 发送消息
        send_kwargs = {
            "value": message_value,
            "key": message_key,
        }

        # 只有当headers不为空时才添加
        if headers:
            send_kwargs["headers"] = headers

        if request.partition is not None:
            send_kwargs["partition"] = request.partition

        # producer.send() returns a future; awaiting the future yields the
        # record metadata once the broker has acknowledged the write.
        future = await producer.send(request.topic, **send_kwargs)
        record_metadata = await future

        logger.info(f"消息发送成功: topic={request.topic}, partition={record_metadata.partition}, offset={record_metadata.offset}")

        return MessageProduceResponse(
            success=True,
            message="消息发送成功",
            topic=request.topic,
            partition=record_metadata.partition,
            offset=record_metadata.offset,
            timestamp=record_metadata.timestamp
        )

    except Exception as e:
        logger.error(f"消息发送失败: {e}")
        raise HTTPException(status_code=500, detail=f"消息发送失败: {str(e)}")


@router.post("/send-batch", response_model=BatchMessageProduceResponse)
async def send_batch_messages(request: BatchMessageProduceRequest):
    """
    Send a batch of messages to one Kafka topic.

    Messages are sent sequentially; a failure of one message is recorded
    in its per-item result and does not abort the rest of the batch.

    Raises:
        HTTPException: 500 when the producer itself cannot be obtained.
    """
    try:
        kafka_client = get_kafka_client()
        producer = await kafka_client.get_producer()

        results = []
        success_count = 0
        failed_count = 0

        # 批量发送消息
        for i, msg_data in enumerate(request.messages):
            try:
                # 准备消息数据
                key = msg_data.get('key')
                value = msg_data.get('value', '')
                headers = msg_data.get('headers', {})
                partition = msg_data.get('partition')
                # 支持每个消息的独立格式，如果没有指定则使用全局格式
                msg_format = msg_data.get('message_format', request.message_format)

                # 准备消息内容 - 转换为bytes
                message_value_str = await _prepare_message_value(value, msg_format)
                message_value = message_value_str.encode('utf-8')
                message_key = key.encode('utf-8') if key else None

                # aiokafka expects headers as (str, bytes) tuples: keys stay
                # str, values are coerced to str then encoded (batch items are
                # raw dicts, so values may not be strings).
                msg_headers = None
                if headers:
                    msg_headers = [(k, str(v).encode('utf-8')) for k, v in headers.items()]

                # 发送消息
                send_kwargs = {
                    "value": message_value,
                    "key": message_key,
                }

                # 只有当headers不为空时才添加
                if msg_headers:
                    send_kwargs["headers"] = msg_headers

                if partition is not None:
                    send_kwargs["partition"] = partition

                future = await producer.send(request.topic, **send_kwargs)

                # 等待发送完成
                record_metadata = await future

                results.append(MessageProduceResponse(
                    success=True,
                    message=f"消息 {i+1} 发送成功",
                    topic=request.topic,
                    partition=record_metadata.partition,
                    offset=record_metadata.offset,
                    timestamp=record_metadata.timestamp
                ))
                success_count += 1

            except Exception as e:
                # Per-message failure: record it and continue with the batch.
                results.append(MessageProduceResponse(
                    success=False,
                    message=f"消息 {i+1} 发送失败: {str(e)}",
                    topic=request.topic
                ))
                failed_count += 1
                logger.error(f"批量发送中的消息 {i+1} 失败: {e}")

        logger.info(f"批量发送完成: 总数={len(request.messages)}, 成功={success_count}, 失败={failed_count}")

        return BatchMessageProduceResponse(
            success=failed_count == 0,
            message=f"批量发送完成，成功 {success_count} 条，失败 {failed_count} 条",
            total_count=len(request.messages),
            success_count=success_count,
            failed_count=failed_count,
            results=results
        )

    except Exception as e:
        logger.error(f"批量消息发送失败: {e}")
        raise HTTPException(status_code=500, detail=f"批量消息发送失败: {str(e)}")


async def _prepare_message_value(value: str, message_format: str) -> str:
    """
    Validate/prepare the message payload and return it as a string;
    callers encode it to UTF-8 bytes before handing it to the producer.

    Args:
        value: Raw message text from the request.
        message_format: One of "json", "text", "avro".

    Raises:
        ValueError: invalid JSON payload, or an unknown format.
        NotImplementedError: Avro is not supported yet.
    """
    if message_format == "json":
        try:
            # Validate only — the original text is forwarded unchanged.
            orjson.loads(value)
        except orjson.JSONDecodeError as e:
            # Chain the parse error so the root cause is preserved in logs.
            raise ValueError("无效的JSON格式") from e
        return value
    elif message_format == "text":
        return value
    elif message_format == "avro":
        # TODO: implement Avro serialization
        raise NotImplementedError("Avro格式暂未实现")
    else:
        raise ValueError(f"不支持的消息格式: {message_format}")


@router.get("/topics")
async def get_available_topics():
    """
    List the topics available as produce targets.

    Kafka-internal topics (names starting with "__") are filtered out.
    """
    try:
        client = get_kafka_client()
        all_topics = await client.list_topics()

        # Hide internal topics such as __consumer_offsets.
        visible_topics = [t for t in all_topics if not t.startswith('__')]

        return {
            "success": True,
            "topics": visible_topics,
            "count": len(visible_topics),
        }

    except Exception as e:
        logger.error(f"获取Topic列表失败: {e}")
        raise HTTPException(status_code=500, detail=f"获取Topic列表失败: {str(e)}")


@router.get("/topic/{topic_name}/partitions")
async def get_topic_partitions(topic_name: str):
    """
    Return partition information for a topic.

    Responds 404 when the topic does not exist. When detailed partition
    metadata cannot be fetched, falls back to a single default partition.
    """
    try:
        client = get_kafka_client()

        # Verify existence first so a missing topic yields a clean 404.
        existing = await client.list_topics()
        if topic_name not in existing:
            raise HTTPException(status_code=404, detail=f"Topic '{topic_name}' 不存在")

        try:
            partition_ids = await client.get_topic_partitions(topic_name)

            # Simplified view: leader/replica/ISR details are not resolved.
            partitions = [
                {"partition": pid, "leader": -1, "replicas": [], "isr": []}
                for pid in partition_ids
            ]

            return {
                "success": True,
                "topic": topic_name,
                "partition_count": len(partitions),
                "partitions": partitions,
            }

        except Exception as partition_error:
            logger.warning(f"无法获取Topic {topic_name} 的详细分区信息: {partition_error}")
            # Metadata lookup failed — report a single default partition.
            return {
                "success": True,
                "topic": topic_name,
                "partition_count": 1,
                "partitions": [
                    {"partition": 0, "leader": -1, "replicas": [], "isr": []}
                ],
            }

    except HTTPException:
        # Re-raise the 404 untouched instead of wrapping it as a 500.
        raise
    except Exception as e:
        logger.error(f"获取Topic分区信息失败: {e}")
        raise HTTPException(status_code=500, detail=f"获取Topic分区信息失败: {str(e)}")
