import logging
import math
import time  # retry backoff sleeps
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from functools import lru_cache
from typing import Any, Dict, List

import pymysql
import requests  # synchronous HTTP client (replaces aiohttp)

# Configure process-wide logging; all module loggers inherit this handler/format.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger("StarRocksDataInserter")


@dataclass
class DatabaseConfig:
    """Connection settings for the StarRocks database (MySQL wire protocol)."""
    host: str
    port: int
    user: str
    password: str
    database: str
    charset: str = 'utf8mb4'    # client charset; utf8mb4 covers full Unicode
    autocommit: bool = False    # commits are issued explicitly after batch inserts

@dataclass
class APIConfig:
    """Settings for the paginated source API."""
    url: str
    headers: Dict[str, str]
    # Base POST body; "pageNo"/"pageSize" are merged in per request.
    payload_template: Dict[str, Any]
    timeout: int = 30      # per-request timeout, seconds
    max_retries: int = 3   # attempts before giving up (exponential backoff between)

class StarRocksDataInserter:
    """Synchronises data from a paginated HTTP API into a StarRocks table.

    Workflow: discover the total record count, page through the API,
    normalise date fields, then replace yesterday's ``dt`` partition via
    DELETE + batched INSERT inside one transaction.
    """

    def __init__(self, db_config: "DatabaseConfig", api_config: "APIConfig", table_name: str):
        self.db_config = db_config
        self.api_config = api_config
        self.table_name = table_name
        self._connection_pool = []   # idle pymysql connections available for reuse
        self._max_pool_size = 5      # cap on idle connections kept alive
        self._table_columns = None   # per-instance column cache (see get_table_columns)
        # One HTTP session so keep-alive connections are reused across requests.
        self.session = requests.Session()

    @contextmanager
    def get_db_connection(self):
        """Yield a pooled DB connection; recycle it to the pool on clean exit.

        On error the connection is closed and discarded rather than pooled —
        a failed statement may leave it in an unusable state. The pool is
        capped at ``_max_pool_size``; surplus connections are closed.
        """
        conn = None
        try:
            if self._connection_pool:
                conn = self._connection_pool.pop()
            else:
                conn = pymysql.connect(
                    host=self.db_config.host,
                    port=self.db_config.port,
                    user=self.db_config.user,
                    password=self.db_config.password,
                    database=self.db_config.database,
                    charset=self.db_config.charset,
                    autocommit=self.db_config.autocommit,
                    cursorclass=pymysql.cursors.DictCursor,
                )
            yield conn
        except Exception:
            # Don't return a possibly-broken connection to the pool.
            if conn is not None:
                try:
                    conn.close()
                except Exception:
                    pass
                conn = None  # prevent the finally block from pooling it
            raise
        finally:
            if conn is not None:
                if len(self._connection_pool) < self._max_pool_size:
                    self._connection_pool.append(conn)
                else:
                    conn.close()  # pool full; do not leak the connection

    def fetch_api_data(self, page_no: int, page_size: int) -> tuple:
        """Fetch one page from the API; return ``(records, total_count)``.

        Retries with exponential backoff. After ``max_retries`` failures the
        error is logged and ``([], 0)`` is returned instead of raising.
        """
        url = self.api_config.url
        headers = self.api_config.headers
        payload = {**self.api_config.payload_template, "pageNo": page_no, "pageSize": page_size}

        for attempt in range(self.api_config.max_retries):
            try:
                response = self.session.post(
                    url,
                    headers=headers,
                    json=payload,
                    timeout=self.api_config.timeout,
                )
                response.raise_for_status()
                json_data = response.json()

                if json_data.get('code') != 0:
                    raise ValueError(f"API返回错误码: {json_data.get('code')}")

                data = json_data.get('data', {}).get('data', {})
                return data.get('list', []), data.get('total', 0)

            except (requests.RequestException, ValueError) as e:
                logger.warning(f"API请求失败（第{attempt + 1}次尝试）: {e}")
                if attempt == self.api_config.max_retries - 1:
                    logger.error(f"API请求最终失败: {e}")
                    return [], 0
                time.sleep(2 ** attempt)  # exponential backoff between retries

        # Defensive: original implicitly returned None when max_retries == 0.
        return [], 0

    def process_date_fields(self, data_list: List[Dict]) -> List[Dict]:
        """Normalise ``updateTime``/``createTime`` to 'YYYY-MM-DD' strings, in place.

        Positive numeric values are treated as epoch *milliseconds* and
        rendered as UTC dates; strings that do not already parse as
        'YYYY-MM-DD' are replaced with None. Other values pass through.
        """
        date_fields = ('updateTime', 'createTime')

        for item in data_list:
            for field in date_fields:
                if field not in item:
                    continue
                raw = item[field]
                if isinstance(raw, (int, float)) and raw > 0:
                    try:
                        # datetime.utcfromtimestamp is deprecated (3.12);
                        # tz-aware conversion yields the same UTC date.
                        dt = datetime.fromtimestamp(raw / 1000, tz=timezone.utc)
                        item[field] = dt.strftime('%Y-%m-%d')
                    except (ValueError, OSError, OverflowError):
                        item[field] = None
                elif isinstance(raw, str):
                    try:
                        datetime.strptime(raw, '%Y-%m-%d')
                    except ValueError:
                        item[field] = None
        return data_list

    def get_table_columns(self) -> List[str]:
        """Return the table's column names in ordinal order (cached per instance).

        A fresh copy is returned so callers may mutate the result freely.
        This replaces the previous ``@lru_cache`` on the method, which both
        kept the instance alive for the cache's lifetime and handed every
        caller the *same* mutable list.
        """
        if self._table_columns is None:
            with self.get_db_connection() as conn:
                with conn.cursor() as cursor:
                    cursor.execute(
                        """
                        SELECT COLUMN_NAME
                        FROM information_schema.COLUMNS
                        WHERE TABLE_SCHEMA = %s
                          AND TABLE_NAME = %s
                        ORDER BY ORDINAL_POSITION
                        """,
                        (self.db_config.database, self.table_name),
                    )
                    self._table_columns = [row['COLUMN_NAME'] for row in cursor.fetchall()]
        return list(self._table_columns)

    def prepare_batch_data(self, data: List[Dict], target_date: str) -> tuple:
        """Build ``(batch_values, ordered_columns)`` for INSERT, with ``dt`` first.

        ``dt`` is forced to *target_date* on every row; all other columns are
        looked up in the API record and default to None when absent.
        """
        table_columns = self.get_table_columns()

        # Move the partition column `dt` to the front of the column order.
        if 'dt' in table_columns:
            table_columns.remove('dt')
        table_columns.insert(0, 'dt')

        batch_values = []
        for item in data:
            row = tuple(
                target_date if col == 'dt' else item.get(col)
                for col in table_columns
            )
            batch_values.append(row)

        return batch_values, table_columns

    def insert_data_batch(self, data: List[Dict], batch_size: int = 2000) -> Dict[str, int]:
        """Replace yesterday's partition: DELETE rows for ``dt``, then batch-INSERT.

        Returns counters ``{'deleted', 'inserted', 'processed'}``. On a batch
        failure the transaction is rolled back and pymysql.MySQLError re-raised.
        """
        target_date = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')
        batch_values, table_columns = self.prepare_batch_data(data, target_date)

        columns_str = ", ".join(f"`{col}`" for col in table_columns)
        placeholders = ", ".join(["%s"] * len(table_columns))
        # NOTE(review): table_name is interpolated directly — it must come from
        # trusted configuration, never from user input.
        insert_query = f"INSERT INTO {self.table_name} ({columns_str}) VALUES ({placeholders})"

        results = {'deleted': 0, 'inserted': 0, 'processed': len(data)}

        with self.get_db_connection() as conn:
            with conn.cursor() as cursor:
                # Clear the target partition first so the load is idempotent.
                delete_query = f"DELETE FROM {self.table_name} WHERE dt = %s"
                cursor.execute(delete_query, (target_date,))
                results['deleted'] = cursor.rowcount

                for start in range(0, len(batch_values), batch_size):
                    batch = batch_values[start:start + batch_size]
                    try:
                        cursor.executemany(insert_query, batch)
                        results['inserted'] += len(batch)
                        logger.info(f"已批量插入 {len(batch)} 条记录，总计 {results['inserted']} 条")
                    except pymysql.MySQLError as e:
                        logger.error(f"批量插入失败: {e}")
                        conn.rollback()
                        raise

                conn.commit()

        return results

    def execute(self) -> Dict[str, Any]:
        """Run the full sync: discover total, page through the API, bulk insert."""
        logger.info("开始数据同步流程")

        # Small probe request just to learn the total record count.
        initial_data, total_records = self.fetch_api_data(1, 10)
        if total_records == 0:
            logger.warning("未获取到有效数据")
            return {'status': 'failed', 'reason': 'no_data'}

        logger.info(f"总记录数: {total_records}")

        page_size = 500
        total_pages = math.ceil(total_records / page_size)

        all_data = []
        for page in range(1, total_pages + 1):
            try:
                page_data, _ = self.fetch_api_data(page, page_size)
                if page_data:
                    all_data.extend(self.process_date_fields(page_data))
                    logger.info(f"已处理第 {page}/{total_pages} 页数据，获取到 {len(page_data)} 条记录")
                else:
                    logger.warning(f"第 {page} 页未获取到数据")
            except Exception as e:
                # A single bad page must not abort the whole sync.
                logger.error(f"处理第 {page} 页数据时出错: {e}")

        insert_results = self.insert_data_batch(all_data)

        logger.info(f"数据同步完成: {insert_results}")
        return {**insert_results, 'status': 'success'}


def main():
    """Entry point: build configs, run the sync, and release all resources."""
    # Target table for the load.
    table_name = "tmp_ods_gys_sajg_region_hierarchy_dd"

    # NOTE(review): credentials are hard-coded in source; move them to
    # environment variables or a secrets store before wider deployment.
    db_config = DatabaseConfig(
        host='172.29.32.176',
        port=9030,
        user='xyc',
        password='ys@Gz62#jkYU',
        database='xyc'
    )

    # API endpoint and static request parameters.
    api_config = APIConfig(
        url='http://172.29.96.184:48080/admin-api/infra/videoplatform/regions',
        headers={
            'User-Agent': 'Apifox/1.0.0 (https://apifox.com)',
            'Content-Type': 'application/json'
        },
        payload_template={
            "treeCode": "0",
            "env": 1
        }
    )

    inserter = StarRocksDataInserter(db_config, api_config, table_name)

    try:
        inserter.execute()
    except KeyboardInterrupt:
        logger.info("用户中断执行")
    except Exception as e:
        # logger.exception preserves the traceback (logger.error dropped it).
        logger.exception(f"执行失败: {e}")
    finally:
        inserter.session.close()  # release HTTP keep-alive connections
        # Also close any pooled DB connections — previously they were leaked.
        for conn in inserter._connection_pool:
            try:
                conn.close()
            except Exception:
                pass


if __name__ == "__main__":
    main()
