import logging
import math
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from functools import lru_cache
from typing import List, Dict, Any, Set, Tuple

import pymysql
import requests
from pymysql import MySQLError
from pymysql.cursors import DictCursor
from requests.exceptions import RequestException

# Logging setup: INFO level, timestamp/name/level prefix; one named logger
# shared by the whole module.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger("StarRocksDataInserter")


@dataclass
class DatabaseConfig:
    """Connection settings for the StarRocks database (MySQL wire protocol)."""
    host: str
    port: int
    user: str
    password: str
    database: str
    charset: str = 'utf8mb4'
    autocommit: bool = False  # callers commit/rollback explicitly
    pool_size: int = 5  # connection pool size


@dataclass
class APIConfig:
    """Settings for the upstream video-platform HTTP API."""
    url: str
    headers: Dict[str, str]
    # Base payload; per-request fields (pageNo/pageSize/regionIndexCode)
    # are merged on top of this template.
    payload_template: Dict[str, Any]
    timeout: int = 30  # per-request timeout, seconds
    max_retries: int = 3  # attempts before giving up on a page


class DatabaseConnectionPool:
    """A minimal best-effort pool of pymysql connections.

    Connections are handed out through the ``get_connection`` context manager
    and returned to the pool when the context exits.

    NOTE(review): connections are returned to the pool even when the ``with``
    body raised — callers in this file rely on that and roll back *after* the
    ``with`` block, so the connection must stay open.
    """

    def __init__(self, db_config: DatabaseConfig):
        self.db_config = db_config
        self._pool = []  # idle connections available for reuse
        self._max_size = db_config.pool_size  # advisory bound (not enforced on return)

    def _new_connection(self):
        """Open a fresh connection from the stored configuration."""
        return pymysql.connect(
            host=self.db_config.host,
            port=self.db_config.port,
            user=self.db_config.user,
            password=self.db_config.password,
            database=self.db_config.database,
            charset=self.db_config.charset,
            autocommit=self.db_config.autocommit,
            cursorclass=DictCursor
        )

    @contextmanager
    def get_connection(self):
        """Yield a pooled (or newly created) connection; return it on exit.

        The pop is EAFP-style: the previous ``if self._pool: pop()`` pattern
        could raise IndexError when two worker threads raced between the
        emptiness check and the pop. Reused connections are pinged (with
        reconnect) so a server-side idle timeout does not surface as an
        obscure query error in the caller.
        """
        try:
            conn = self._pool.pop()
        except IndexError:
            conn = self._new_connection()
        else:
            try:
                # Revive the connection if the server dropped it while idle.
                conn.ping(reconnect=True)
            except Exception:
                try:
                    conn.close()
                except Exception:
                    pass
                conn = self._new_connection()
        try:
            yield conn
        finally:
            self._pool.append(conn)

    def close_all(self):
        """Close every idle connection and empty the pool (best effort)."""
        for conn in self._pool:
            try:
                conn.close()
            except Exception:
                pass
        self._pool.clear()


class StarRocksDataInserter:
    """Sync one day of camera data from the video-platform API into StarRocks.

    Flow: delete yesterday's partition, collect the distinct region codes
    from the regions table, then (optionally in parallel) page through the
    API per region and bulk-insert the rows under the ``dt`` partition key.
    """

    def __init__(self, db_config: DatabaseConfig, api_config: APIConfig, table_name: str):
        self.db_config = db_config
        self.api_config = api_config
        self.table_name = table_name
        self.connection_pool = DatabaseConnectionPool(db_config)
        # Reused session: connection pooling/keep-alive across API calls.
        self.session = requests.Session()
        # All rows of this run are written under yesterday's date.
        self.target_date = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')
        # Lazily filled cache for the target table's columns (see get_table_columns).
        self._table_columns = None

    def delete_existing_data(self) -> int:
        """Delete rows already loaded for the target date; return rows removed.

        Returns 0 (after logging) on failure rather than raising.
        """
        deleted_count = 0
        conn = None  # pre-bind so the except branch cannot hit an unbound name
        try:
            with self.connection_pool.get_connection() as conn:
                with conn.cursor() as cursor:
                    # table_name is an internal constant, not user input, so the
                    # f-string identifier is acceptable; dt is parameterized.
                    delete_query = f"DELETE FROM {self.table_name} WHERE dt = %s"
                    cursor.execute(delete_query, (self.target_date,))
                    deleted_count = cursor.rowcount
                    conn.commit()
                    logger.info(f"已删除 {deleted_count} 条 {self.target_date} 的现有数据")
        except Exception as e:
            logger.error(f"删除现有数据失败: {e}")
            if conn is not None:
                conn.rollback()
        return deleted_count

    def get_distinct_region_codes(self) -> Set[str]:
        """Return the distinct region index codes recorded for the target date.

        Returns an empty set (after logging) on failure.
        """
        distinct_codes = set()
        # Reuse self.target_date instead of recomputing "yesterday" so a run
        # that straddles midnight stays internally consistent.
        previous_day = self.target_date

        try:
            with self.connection_pool.get_connection() as conn:
                with conn.cursor() as cursor:
                    query = """
                        SELECT DISTINCT indexCode 
                        FROM ods_gys_videoplatform_region_dd 
                        WHERE indexCode IS NOT NULL 
                        AND dt = %s
                    """
                    cursor.execute(query, (previous_day,))
                    distinct_codes = {row['indexCode'] for row in cursor.fetchall()}
                    logger.info(f"获取到 {len(distinct_codes)} 个去重的 regionIndexCode（日期: {previous_day}）")
        except Exception as e:
            logger.error(f"获取去重 regionIndexCode 失败: {e}")
        return distinct_codes

    def fetch_api_data(self, page_no: int, page_size: int, region_index_code: str) -> Tuple[List[Dict], int]:
        """Fetch one page of data for a region from the API.

        Retries up to ``max_retries`` times with exponential backoff and
        returns ``(item_list, total_record_count)``; ``([], 0)`` after the
        final attempt fails.
        """
        payload = {
            **self.api_config.payload_template,
            "pageNo": page_no,
            "pageSize": page_size,
            "regionIndexCode": region_index_code
        }

        for attempt in range(self.api_config.max_retries):
            try:
                response = self.session.post(
                    self.api_config.url,
                    headers=self.api_config.headers,
                    json=payload,
                    timeout=self.api_config.timeout
                )
                response.raise_for_status()
                json_data = response.json()

                # The platform wraps application errors in an HTTP-200 body
                # with a non-zero 'code'; surface that as a retryable failure.
                if json_data.get('code') != 0:
                    raise ValueError(f"API返回错误码: {json_data.get('code')}, 消息: {json_data.get('msg')}")

                data = json_data.get('data', {}).get('data', {})
                return data.get('list', []), data.get('total', 0)

            except (RequestException, ValueError) as e:
                logger.warning(f"API请求失败（第{attempt + 1}次尝试, regionIndexCode: {region_index_code}）: {e}")
                if attempt == self.api_config.max_retries - 1:
                    logger.error(f"API请求最终失败（regionIndexCode: {region_index_code}）: {e}")
                    return [], 0
                time.sleep(2 ** attempt)  # exponential backoff: 1s, 2s, 4s, ...
        return [], 0

    def process_date_fields(self, data_list: List[Dict], region_index_code: str) -> List[Dict]:
        """Stamp each item with its region code and normalize timestamp fields.

        'updateTime'/'createTime' are assumed to arrive as epoch milliseconds
        and are rewritten in place as 'YYYY-MM-DD' (UTC), or None when the
        value cannot be converted. Mutates and returns ``data_list``.
        """
        for item in data_list:
            item['region_index_code'] = region_index_code

            for date_field in ('updateTime', 'createTime'):
                if date_field in item:
                    timestamp = item[date_field]
                    if isinstance(timestamp, (int, float)) and timestamp > 0:
                        try:
                            # Timezone-aware replacement for the deprecated
                            # datetime.utcfromtimestamp; same resulting date string.
                            dt = datetime.fromtimestamp(timestamp / 1000, tz=timezone.utc)
                            item[date_field] = dt.strftime('%Y-%m-%d')
                        except (ValueError, OSError, OverflowError):
                            item[date_field] = None
        return data_list

    def get_table_columns(self) -> List[str]:
        """Return the target table's column names in ordinal order (cached).

        Replaces the former ``@lru_cache``, which both pinned ``self`` in a
        module-lifetime cache and handed every caller the same mutable list —
        ``prepare_batch_data`` then mutated the cached object in place.
        """
        if self._table_columns is None:
            with self.connection_pool.get_connection() as conn:
                with conn.cursor() as cursor:
                    cursor.execute("""
                        SELECT COLUMN_NAME 
                        FROM information_schema.COLUMNS 
                        WHERE TABLE_SCHEMA = %s 
                        AND TABLE_NAME = %s
                        ORDER BY ORDINAL_POSITION
                    """, (self.db_config.database, self.table_name))
                    self._table_columns = [row['COLUMN_NAME'] for row in cursor.fetchall()]
        # Hand out a copy so callers can reorder/extend it freely.
        return list(self._table_columns)

    def prepare_batch_data(self, data: List[Dict]) -> Tuple[List[tuple], List[str]]:
        """Build ``(value_tuples, ordered_columns)`` for the INSERT statement.

        'dt' is forced to the first column (its value is the run's target
        date); missing item keys become None via ``dict.get``.
        """
        table_columns = self.get_table_columns()

        # Move (or add) dt to the front — it is filled from self.target_date.
        if 'dt' in table_columns:
            table_columns.remove('dt')
        table_columns.insert(0, 'dt')

        # process_date_fields injects region_index_code into every item.
        if 'region_index_code' not in table_columns:
            table_columns.append('region_index_code')

        batch_values = []
        for item in data:
            values = [self.target_date]  # dt value
            for col in table_columns[1:]:  # skip the dt column
                values.append(item.get(col))
            batch_values.append(tuple(values))

        return batch_values, table_columns

    def insert_data_batch(self, data: List[Dict], batch_size: int = 2000) -> int:
        """Bulk-insert rows in chunks of ``batch_size``; return rows inserted.

        Commits once after all chunks (all-or-nothing for this call) and
        re-raises MySQLError after logging and rolling back.
        """
        if not data:
            return 0

        batch_values, table_columns = self.prepare_batch_data(data)
        columns_str = ", ".join(f"`{col}`" for col in table_columns)
        placeholders = ", ".join(["%s"] * len(table_columns))
        insert_query = f"INSERT INTO {self.table_name} ({columns_str}) VALUES ({placeholders})"

        inserted_count = 0
        conn = None  # pre-bind so the except branch cannot hit an unbound name
        try:
            with self.connection_pool.get_connection() as conn:
                with conn.cursor() as cursor:
                    for i in range(0, len(batch_values), batch_size):
                        batch = batch_values[i:i + batch_size]
                        cursor.executemany(insert_query, batch)
                        inserted_count += len(batch)
                        logger.info(f"已批量插入 {len(batch)} 条记录，总计 {inserted_count} 条")
                    conn.commit()
        except MySQLError as e:
            logger.error(f"批量插入失败: {e}")
            if conn is not None:
                conn.rollback()
            raise

        return inserted_count

    def fetch_all_data_for_region(self, region_index_code: str) -> List[Dict]:
        """Fetch and post-process every page of data for one region.

        Probes page 1 to learn the total, then pages through with a larger
        page size. Failed pages are logged and skipped.
        """
        logger.info(f"开始获取 regionIndexCode: {region_index_code} 的数据")

        # Small probe request just to read the total record count.
        first_page_data, total_records = self.fetch_api_data(1, 10, region_index_code)
        if total_records == 0:
            logger.warning(f"regionIndexCode {region_index_code} 未获取到有效数据")
            return []

        logger.info(f"regionIndexCode {region_index_code} 总记录数: {total_records}")
        page_size = 500
        total_pages = math.ceil(total_records / page_size)

        all_data = []
        for page in range(1, total_pages + 1):
            try:
                page_data, _ = self.fetch_api_data(page, page_size, region_index_code)
                if page_data:
                    processed_data = self.process_date_fields(page_data, region_index_code)
                    all_data.extend(processed_data)
                    logger.info(f"已处理第 {page}/{total_pages} 页数据，获取到 {len(page_data)} 条记录")
                else:
                    logger.warning(f"第 {page} 页未获取到数据")
            except Exception as e:
                logger.error(f"处理第 {page} 页数据时出错: {e}")
                continue

        return all_data

    def process_region(self, region_index_code: str) -> Dict[str, Any]:
        """Fetch and insert all data for one region; return a result record.

        Never raises — failures are reported as ``{'status': 'failed', ...}``.
        """
        try:
            region_data = self.fetch_all_data_for_region(region_index_code)
            if not region_data:
                return {'status': 'failed', 'reason': 'no_data', 'region_index_code': region_index_code}

            inserted_count = self.insert_data_batch(region_data)
            return {
                'status': 'success',
                'region_index_code': region_index_code,
                'inserted': inserted_count,
                'processed': len(region_data)
            }
        except Exception as e:
            # (bugfix) the original referenced the undefined name ``region_code``
            # here, so the handler itself raised NameError and hid the real error.
            logger.error(f"处理 regionIndexCode {region_index_code} 时发生错误: {e}")
            return {
                'status': 'failed',
                'reason': 'unexpected_error',
                'region_index_code': region_index_code,
                'error_msg': str(e)
            }

    def execute_concurrently(self, max_workers: int = 5) -> List[Dict[str, Any]]:
        """Process every region on a thread pool; return one result per region."""
        region_codes = self.get_distinct_region_codes()
        if not region_codes:
            logger.error("未获取到任何有效的 regionIndexCode")
            return [{'status': 'failed', 'reason': 'no_region_codes'}]

        all_results = []
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            # Fan out one task per region.
            future_to_region = {
                executor.submit(self.process_region, region_code): region_code
                for region_code in region_codes
            }

            # Collect results as tasks finish; a crashed task still yields a
            # failure record instead of aborting the whole run.
            for future in as_completed(future_to_region):
                region_code = future_to_region[future]
                try:
                    result = future.result()
                    all_results.append(result)
                except Exception as e:
                    logger.error(f"处理 regionIndexCode {region_code} 时发生未预期错误: {e}")
                    all_results.append({
                        'status': 'failed',
                        'reason': 'unexpected_error',
                        'region_index_code': region_code,
                        'error_msg': str(e)
                    })

        return all_results

    def execute(self, concurrent: bool = True) -> List[Dict[str, Any]]:
        """Run the full sync (delete + reload) and return the per-region results."""
        logger.info("开始数据同步流程")

        # Make the load idempotent: clear the target-date partition first.
        self.delete_existing_data()

        if concurrent:
            results = self.execute_concurrently()
        else:
            region_codes = self.get_distinct_region_codes()
            results = [self.process_region(rc) for rc in region_codes]

        # Summarize the outcome for the operator.
        success_count = sum(1 for r in results if r.get('status') == 'success')
        failed_count = len(results) - success_count
        total_inserted = sum(r.get('inserted', 0) for r in results if r.get('status') == 'success')

        logger.info(f"处理完成汇总: 成功 {success_count} 个, 失败 {failed_count} 个, 总共插入 {total_inserted} 条记录")
        return results

    def close(self):
        """Release the HTTP session and all pooled database connections."""
        self.session.close()
        self.connection_pool.close_all()


def main():
    """Entry point: build the configs, run the sync, always release resources."""
    table_name = "ods_gys_videoplatform_camera_monitoring_point_dd"

    # SECURITY(review): database credentials are hard-coded in source; move
    # them to environment variables or a secrets store before wider use.
    db_config = DatabaseConfig(
        host='172.29.32.176',
        port=9030,
        user='xyc',
        password='ys@Gz62#jkYU',
        database='xyc',
        pool_size=10  # larger pool to support the concurrent workers
    )

    api_config = APIConfig(
        url='http://172.29.96.184:48080/admin-api/infra/videoplatform/camerasRegionIndexCode',
        headers={
            'User-Agent': 'Apifox/1.0.0 (https://apifox.com)',
            'Content-Type': 'application/json'
        },
        payload_template={
            "treeCode": "0",
            "env": 1,
        }
    )

    inserter = StarRocksDataInserter(db_config, api_config, table_name)

    try:
        # Concurrent mode; worker count is tunable via execute_concurrently.
        # (The return value was previously bound to an unused local.)
        inserter.execute(concurrent=True)
    except KeyboardInterrupt:
        logger.info("用户中断执行")
    except Exception as e:
        logger.error(f"执行失败: {e}")
    finally:
        inserter.close()


# Run the sync only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
