import asyncio
import logging
import math
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from functools import lru_cache
from typing import Any, Dict, List, Optional

import aiohttp
import pymysql

# 配置日志
# Module-wide logging: timestamped records at INFO level and above.
_LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=_LOG_FORMAT, level=logging.INFO)
logger = logging.getLogger("StarRocksDataInserter")


@dataclass
class DatabaseConfig:
    """Connection settings for the StarRocks database (MySQL protocol)."""
    host: str                  # server hostname or IP
    port: int                  # MySQL-protocol port
    user: str
    password: str
    database: str              # schema to connect to
    charset: str = 'utf8mb4'   # connection character set
    autocommit: bool = False   # callers commit explicitly


@dataclass
class APIConfig:
    """HTTP API endpoint configuration.

    `payload_template` is the base POST body; per-request paging fields are
    merged into a copy of it by the caller.
    """
    url: str
    headers: Dict[str, str]
    payload_template: Dict[str, Any]
    timeout: int = 30       # request timeout, seconds
    max_retries: int = 3    # attempts before giving up on a page


class StarRocksDataInserter:
    """Syncs paginated API data into a StarRocks table.

    Pipeline: fetch every page from the configured HTTP endpoint
    concurrently, normalize date fields, then replace yesterday's partition
    in the target table (DELETE by ``dt``, then batched INSERT).
    """

    def __init__(self, db_config: DatabaseConfig, api_config: APIConfig, table_name: str):
        """
        Args:
            db_config: StarRocks connection settings (MySQL protocol).
            api_config: Endpoint, headers, payload template and retry policy.
            table_name: Target table. It is interpolated into SQL strings, so
                it must come from trusted configuration, never user input.
        """
        self.db_config = db_config
        self.api_config = api_config
        self.table_name = table_name
        self._connection_pool = []   # idle, reusable connections
        self._max_pool_size = 5      # cap on idle connections kept around
        self._table_columns = None   # lazy per-instance column cache

    @contextmanager
    def get_db_connection(self):
        """Yield a database connection from a small idle pool.

        A connection that was in use when an exception escaped is closed and
        discarded (it may be broken or mid-transaction); healthy connections
        are recycled, but never more than ``_max_pool_size`` are kept.
        """
        conn = None
        try:
            if self._connection_pool:
                conn = self._connection_pool.pop()
            else:
                conn = pymysql.connect(
                    host=self.db_config.host,
                    port=self.db_config.port,
                    user=self.db_config.user,
                    password=self.db_config.password,
                    database=self.db_config.database,
                    charset=self.db_config.charset,
                    autocommit=self.db_config.autocommit,
                    cursorclass=pymysql.cursors.DictCursor
                )
            yield conn
        except Exception:
            # Fix: the old code recycled connections even after a failure,
            # returning possibly-broken sessions to the pool.
            if conn is not None:
                try:
                    conn.close()
                except Exception:
                    pass
                conn = None
            raise
        finally:
            if conn is not None:
                # Fix: _max_pool_size was declared but never enforced.
                if len(self._connection_pool) < self._max_pool_size:
                    self._connection_pool.append(conn)
                else:
                    conn.close()

    async def fetch_api_data_async(self, page_no: int, page_size: int) -> tuple:
        """Fetch one page from the API with exponential-backoff retries.

        Returns:
            ``(records, total)`` — the page's record list plus the total
            record count reported by the API; ``([], 0)`` once
            ``max_retries`` attempts have failed.
        """
        payload = {**self.api_config.payload_template,
                   "pageNo": page_no, "pageSize": page_size}
        # Explicit ClientTimeout: passing a bare int per-request is deprecated.
        timeout = aiohttp.ClientTimeout(total=self.api_config.timeout)

        async with aiohttp.ClientSession(timeout=timeout) as session:
            for attempt in range(self.api_config.max_retries):
                try:
                    async with session.post(self.api_config.url,
                                            headers=self.api_config.headers,
                                            json=payload) as response:
                        response.raise_for_status()
                        json_data = await response.json()

                        if json_data.get('code') != 0:
                            raise ValueError(f"API返回错误码: {json_data.get('code')}")

                        data = json_data.get('data', {}).get('data', {})
                        return data.get('list', []), data.get('total', 0)

                # Fix: asyncio.TimeoutError was not caught before, so a single
                # timeout aborted the page instead of triggering a retry.
                except (aiohttp.ClientError, asyncio.TimeoutError, ValueError) as e:
                    if attempt == self.api_config.max_retries - 1:
                        logger.error(f"API请求失败（第{attempt + 1}次尝试）: {e}")
                        return [], 0
                    await asyncio.sleep(2 ** attempt)  # exponential backoff
        return [], 0  # only reachable when max_retries <= 0

    def process_date_fields(self, data_list: List[Dict]) -> List[Dict]:
        """Normalize ``updateTime``/``createTime`` to 'YYYY-MM-DD' in place.

        Positive numbers are treated as epoch *milliseconds* and converted to
        a UTC date string; strings that are not already 'YYYY-MM-DD' become
        ``None``. Returns the same list for chaining.
        """
        date_fields = ('updateTime', 'createTime')

        for item in data_list:
            for field in date_fields:
                if field not in item:
                    continue
                raw = item[field]
                if isinstance(raw, (int, float)) and raw > 0:
                    try:
                        # Timezone-aware replacement for the deprecated
                        # datetime.utcfromtimestamp(); yields the same date.
                        dt = datetime.fromtimestamp(raw / 1000, tz=timezone.utc)
                        item[field] = dt.strftime('%Y-%m-%d')
                    except (ValueError, OSError, OverflowError):
                        item[field] = None
                elif isinstance(raw, str):
                    try:
                        datetime.strptime(raw, '%Y-%m-%d')
                    except ValueError:
                        item[field] = None
        return data_list

    def get_table_columns(self) -> List[str]:
        """Return the table's column names in ordinal order (cached).

        Fix: the previous ``@lru_cache`` on an instance method leaked the
        instance (B019) and handed out the *same* list object on every call,
        which ``prepare_batch_data`` then mutated. A per-instance cache plus
        a defensive copy avoids both problems.
        """
        if self._table_columns is None:
            with self.get_db_connection() as conn:
                with conn.cursor() as cursor:
                    cursor.execute("""
                        SELECT COLUMN_NAME 
                        FROM information_schema.COLUMNS 
                        WHERE TABLE_SCHEMA = %s 
                        AND TABLE_NAME = %s
                        ORDER BY ORDINAL_POSITION
                    """, (self.db_config.database, self.table_name))
                    self._table_columns = [row['COLUMN_NAME'] for row in cursor.fetchall()]
        return list(self._table_columns)

    def prepare_batch_data(self, data: List[Dict], target_date: str) -> tuple:
        """Build row tuples aligned to the table's columns.

        Returns:
            ``(batch_values, table_columns)`` with the partition column
            ``dt`` forced to the front and filled with ``target_date``.
            Record keys missing from an item become ``None``.

        (Annotation fixed: the old ``-> List[tuple]`` did not match the
        two-element tuple actually returned.)
        """
        table_columns = self.get_table_columns()  # fresh copy, safe to reorder

        # Force the partition column 'dt' to the first position.
        if 'dt' in table_columns:
            table_columns.remove('dt')
        table_columns.insert(0, 'dt')

        batch_values = [
            tuple(target_date if col == 'dt' else item.get(col)
                  for col in table_columns)
            for item in data
        ]
        return batch_values, table_columns

    async def insert_data_batch(self, data: List[Dict], batch_size: int = 2000) -> Dict[str, int]:
        """Replace yesterday's ``dt`` partition with ``data``, in batches.

        Deletes all rows where ``dt`` equals yesterday, inserts the prepared
        rows ``batch_size`` at a time, and commits once at the end. Rolls
        back and re-raises on any batch failure.

        NOTE(review): despite ``async def`` (kept for interface
        compatibility), the DB work here is blocking and will stall the
        event loop; wrap it in an executor if that matters.
        """
        target_date = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')
        batch_values, table_columns = self.prepare_batch_data(data, target_date)

        columns_str = ", ".join(f"`{col}`" for col in table_columns)
        placeholders = ", ".join(["%s"] * len(table_columns))
        # table_name comes from trusted config (see __init__); row values are
        # parameterized.
        insert_query = f"INSERT INTO {self.table_name} ({columns_str}) VALUES ({placeholders})"

        results = {'deleted': 0, 'inserted': 0, 'processed': len(data)}

        with self.get_db_connection() as conn:
            with conn.cursor() as cursor:
                # Idempotent reload: clear the target partition first.
                delete_query = f"DELETE FROM {self.table_name} WHERE dt = %s"
                cursor.execute(delete_query, (target_date,))
                results['deleted'] = cursor.rowcount

                for start in range(0, len(batch_values), batch_size):
                    batch = batch_values[start:start + batch_size]
                    try:
                        cursor.executemany(insert_query, batch)
                        results['inserted'] += len(batch)
                        logger.info(f"已批量插入 {results['inserted']} 条记录")
                    except pymysql.MySQLError as e:
                        logger.error(f"批量插入失败: {e}")
                        conn.rollback()
                        raise

                conn.commit()

        return results

    async def execute(self) -> Dict[str, Any]:
        """Run the full sync: probe total, fetch all pages, load the table.

        Returns:
            A dict with ``status`` plus the insert counters on success, or a
            ``reason`` key on failure.
        """
        logger.info("开始数据同步流程")

        # Probe request: only the total count matters here.
        initial_data, total_records = await self.fetch_api_data_async(1, 10)
        if total_records == 0:
            logger.warning("未获取到有效数据")
            return {'status': 'failed', 'reason': 'no_data'}

        logger.info(f"总记录数: {total_records}")

        # Fetch all pages concurrently.
        page_size = 500
        total_pages = math.ceil(total_records / page_size)
        tasks = [self.fetch_api_data_async(page, page_size)
                 for page in range(1, total_pages + 1)]

        all_data = []
        results = await asyncio.gather(*tasks, return_exceptions=True)

        for i, result in enumerate(results, 1):
            if isinstance(result, Exception):
                logger.error(f"获取第 {i} 页数据时出错: {result}")
            else:
                page_data, _ = result
                if page_data:
                    all_data.extend(self.process_date_fields(page_data))
                    logger.info(f"已处理第 {i}/{total_pages} 页数据")

        # Fix: previously an all-pages-failed run still wiped yesterday's
        # partition and reported success. Bail out before touching the table.
        if not all_data:
            logger.error("All pages failed; aborting before modifying the table")
            return {'status': 'failed', 'reason': 'fetch_failed'}

        insert_results = await self.insert_data_batch(all_data)

        logger.info(f"数据同步完成: {insert_results}")
        return {**insert_results, 'status': 'success'}


def main():
    """Wire up configuration and run the sync job.

    NOTE(review): credentials and endpoints are hard-coded below; they
    should move to environment variables or a secrets store before this
    ships anywhere shared.
    """
    # Target table for the sync (trusted config; interpolated into SQL).
    table_name = "tmp_ods_gys_sajg_region_hierarchy_dd"

    # Database connection settings.
    db_config = DatabaseConfig(
        host='172.29.32.176',
        port=9030,
        user='xyc',
        password='ys@Gz62#jkYU',
        database='xyc'
    )

    # Source API settings; paging fields are merged in per request.
    api_config = APIConfig(
        url='http://172.29.96.184:48080/admin-api/infra/videoplatform/regions',
        headers={
            'User-Agent': 'Apifox/1.0.0 (https://apifox.com)',
            'Content-Type': 'application/json'
        },
        payload_template={
            "treeCode": "0",
            "env": 1
        }
    )

    inserter = StarRocksDataInserter(db_config, api_config, table_name)

    try:
        asyncio.run(inserter.execute())
    except KeyboardInterrupt:
        logger.info("用户中断执行")
    except Exception:
        # Fix: logger.error(f"...{e}") dropped the traceback;
        # logger.exception preserves it.
        logger.exception("执行失败")


# Run the sync only when executed as a script (not on import).
if __name__ == "__main__":
    main()
