import base64
import json
import logging
import os
import sys
import time
from contextlib import contextmanager
from datetime import datetime, timedelta
from typing import Dict, Optional, Union, Tuple, Any
from typing import List
import polars as pl
from dataclasses import dataclass
from typing import Optional
import psycopg2
import psycopg2.extras
import psycopg2.pool
import requests
import rsa
from deepdiff import DeepDiff


def get_logger():
    """Build and return the module logger.

    Logs go both to today's log file (see log_project_path) and to
    stdout, at INFO level.

    Returns:
        logging.Logger: the logger for this module.
    """
    _, log_file = log_project_path()

    # One shared layout for both handlers (the original duplicated it).
    fmt = '%(asctime)s [%(levelname)s] %(module)s-%(lineno)d:\t%(message)s'
    datefmt = "%Y/%m/%d %H:%M:%S"

    # File handler: appends UTF-8 text to today's log file.
    file_handler = logging.FileHandler(log_file, mode='a', encoding="utf8")
    file_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
    file_handler.setLevel(logging.INFO)

    # Console handler: same layout, written to stdout.
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
    console_handler.setLevel(logging.INFO)

    # basicConfig is a no-op once the root logger has handlers, so calling
    # get_logger() more than once will not duplicate output.
    logging.basicConfig(
        level=logging.INFO,  # was min(logging.INFO, logging.INFO): a no-op expression
        handlers=[file_handler, console_handler],
    )
    return logging.getLogger(__name__)

def log_project_path():
    """Return (log_dir, log_file) for today's log.

    log_dir is <project_root>/logs where project_root is the parent of
    this file's directory; log_file is <log_dir>/YYYY-MM-DD.txt. The
    logs directory is created if it does not already exist.

    Returns:
        tuple[str, str]: (log directory, full path of today's log file).
    """
    root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    log_path = os.path.join(root_path, 'logs')
    # Use a proper two-argument os.path.join instead of '\\' string
    # concatenation so the path is portable across Windows and POSIX.
    txt_name = '{}.txt'.format(datetime.now().strftime("%Y-%m-%d"))
    txt_name_time = os.path.join(log_path, txt_name)
    # makedirs(exist_ok=True) is race-free, unlike exists()+mkdir.
    os.makedirs(log_path, exist_ok=True)
    return log_path, txt_name_time

logger = get_logger()

class GlobalExceptionHandler:
    """sys.excepthook replacement that logs and counts uncaught exceptions."""

    def __init__(self):
        self.exception_count = 0          # total uncaught exceptions seen
        self.last_exception_time = None   # datetime of the most recent one
        self.exception_details = []       # one info dict per exception

    def handle_exception(self, exc_type, exc_value, exc_traceback):
        """sys.excepthook-compatible handler.

        Logs the exception with its full traceback and keeps running
        statistics. KeyboardInterrupt is delegated to the default hook
        *before* any counting/logging, so Ctrl-C exits quietly and is
        not recorded as a failure (the original counted and logged it
        first, then delegated).
        """
        if issubclass(exc_type, KeyboardInterrupt):
            sys.__excepthook__(exc_type, exc_value, exc_traceback)
            return

        self.exception_count += 1
        self.last_exception_time = datetime.now()

        self.exception_details.append({
            'timestamp': self.last_exception_time,
            'type': exc_type.__name__,
            'value': str(exc_value),
            'count': self.exception_count
        })

        logger.error(
            f"未捕获的异常 [{self.exception_count}]: {exc_type.__name__}: {exc_value}",
            exc_info=(exc_type, exc_value, exc_traceback)
        )
        logger.error(f"程序运行异常统计 - 总异常数: {self.exception_count}, 最后异常时间: {self.last_exception_time}")

# Install the handler as the process-wide excepthook so uncaught exceptions
# are logged (and counted) instead of only being printed to stderr.
global_exception_handler = GlobalExceptionHandler()
sys.excepthook = global_exception_handler.handle_exception

class PostgreSQLConnectionPool:
    """Thin wrapper over psycopg2's ThreadedConnectionPool that hands out
    connections wrapped in explicit transactions."""

    def __init__(self, min_conn: int, max_conn: int, **kwargs):
        # kwargs are forwarded verbatim to psycopg2 (dbname, user, host, ...).
        self.pool = psycopg2.pool.ThreadedConnectionPool(
            min_conn, max_conn, **kwargs
        )

    @contextmanager
    def get_transaction(self):
        """Yield a TransactionContext bound to a pooled connection.

        Commits on normal exit, rolls back on any exception, and always
        returns the connection to the pool.
        """
        conn = None
        try:
            conn = self.pool.getconn()
            # Reset autocommit and clear any transaction state left over
            # from the previous borrower of this pooled connection.
            conn.autocommit = False
            if conn.status == psycopg2.extensions.STATUS_IN_TRANSACTION:
                conn.rollback()
                logger.warning("清理了上一个未完成的事务")

            logger.info("✅ 事务开始")
            yield TransactionContext(conn)

            conn.commit()
            logger.info("✅ 事务已提交")

        except Exception as e:
            if conn:
                conn.rollback()
                logger.error(f"❌ 事务回滚: {e}")
            raise  # bare raise keeps the original traceback intact
        finally:
            if conn:
                try:
                    # Reset autocommit so the next borrower starts clean.
                    conn.autocommit = True
                finally:
                    # putconn must run even if the reset raises on a broken
                    # connection, otherwise the pool slot leaks.
                    self.pool.putconn(conn)
                    logger.info("🔁 连接已归还连接池")

    def execute_in_transaction(self, func, *args, **kwargs):
        """Run func(tx, *args, **kwargs) inside a managed transaction and
        return its result."""
        with self.get_transaction() as tx:
            return func(tx, *args, **kwargs)

    def close_all(self):
        """Close every connection held by the pool."""
        self.pool.closeall()
        logger.info("🔌 所有连接已关闭")

class TransactionContext:
    """Cursor-level helpers bound to one pooled connection; instances are
    created by PostgreSQLConnectionPool.get_transaction()."""

    def __init__(self, connection):
        self.conn = connection
        self.execute_values_stats = {}  # reserved for per-table statistics

    def execute_values(self, table_name: str, sql: str, argslist: List[Tuple],
                       template: Optional[str] = None, page_size: int = 1000):
        """Bulk-write argslist into table_name via psycopg2's execute_values
        and log a summary afterwards.

        Args:
            table_name: target table (used only for logging here; the SQL
                itself carries the real table name).
            sql: INSERT ... VALUES %s statement.
            argslist: one tuple per row.
            template: optional per-row placeholder template.
            page_size: rows per batch sent to the server.
        """
        with self.conn.cursor() as cursor:
            psycopg2.extras.execute_values(cursor, sql, argslist, template=template, page_size=page_size)
            logger.info(f"📊 向表 {table_name} 执行 execute_values 完成，影响行数: {cursor.rowcount}，处理数据条数: {len(argslist)}")
            logger.info("="*100)
            logger.info(f"📊 数据导入完成 - 统计汇总")
            logger.info(f"成功行数: {cursor.rowcount}")
            logger.info(f"✅ 累计落表总行数（新增+更新）: {len(argslist)}")
            logger.info("="*100)

    def execute_query_dict(self, query: str, params: Optional[tuple] = None) -> List[Dict[str, Any]]:
        """Run a query and return all rows as dicts (RealDictCursor).

        Raises:
            Exception: re-raised after logging if the query fails.
        """
        try:
            # The `with` block already closes the cursor on exit; the old
            # explicit cursor.close() in a nested finally double-closed it.
            with self.conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cursor:
                cursor.execute(query, params)
                return cursor.fetchall()
        except Exception as e:
            logger.error(f"执行查询失败: {e}")
            raise

    def commit(self):
        """Explicitly commit the current transaction."""
        self.conn.commit()
        logger.info("✅ 手动提交事务")

    def rollback(self):
        """Explicitly roll back the current transaction."""
        self.conn.rollback()
        logger.info("❌ 手动回滚事务")

@dataclass
class MetricRecord:
    """One normalized metric row, ready for database insertion."""
    keys: str
    edb_metrics_code: str
    third_code: str
    third_type: str
    metrics_value: float  # always coerced to float on construction
    data_time: str
    create_time: str
    modify_time: str
    third_create_time: str

    @classmethod
    def from_dict(cls, data: dict) -> "MetricRecord":
        """Build a normalized record from a raw dict whose values may be
        str or None. metrics_value is coerced to float; unparsable or
        null-ish values become 0.0, everything else becomes str ("" for None).
        """
        def as_text(raw) -> str:
            return str(raw) if raw is not None else ""

        def as_number(raw) -> float:
            # Treat the usual null-ish sentinels as zero up front.
            if raw in (None, "", "null", "None"):
                return 0.0
            try:
                return float(raw)
            except (ValueError, TypeError):
                logger.warning(f"无法将 metrics_value 转为 float: {raw} (keys={data.get('keys', 'N/A')})")
                return 0.0

        return cls(
            keys=as_text(data.get("keys")),
            edb_metrics_code=as_text(data.get("edb_metrics_code")),
            third_code=as_text(data.get("third_code")),
            third_type=as_text(data.get("third_type")),
            metrics_value=as_number(data.get("metrics_value")),
            data_time=as_text(data.get("data_time")),
            create_time=as_text(data.get("create_time")),
            modify_time=as_text(data.get("modify_time")),
            third_create_time=as_text(data.get("third_create_time")),
        )

    def to_tuple(self) -> tuple:
        """Row tuple for a database INSERT; metrics_value is serialized to
        str because the destination column is TEXT."""
        return (
            self.keys,
            self.edb_metrics_code,
            self.third_code,
            self.third_type,
            str(self.metrics_value),
            self.data_time,
            self.create_time,
            self.modify_time,
            self.third_create_time,
        )

    def to_dict(self) -> dict:
        """Plain-dict view for debugging or DeepDiff comparisons."""
        field_names = (
            "keys", "edb_metrics_code", "third_code", "third_type",
            "metrics_value", "data_time", "create_time", "modify_time",
            "third_create_time",
        )
        return {name: getattr(self, name) for name in field_names}

class DataImporter:
    """Pulls metric data from the remote API page by page, normalizes it
    into MetricRecord objects and upserts it into PostgreSQL."""

    def __init__(self, api_config: dict, db_config: dict, executor: PostgreSQLConnectionPool):
        self.api_config = api_config
        self.executor = executor
        self.db_config = db_config          # expects key 'table_name'
        self._cached_token = None           # auth token, cached after first fetch
        self.total_inserted_rows = 0        # running count of rows written
        self.session = requests.session()   # reused so the token header persists

    def _call_api_with_retry(
            self,
            api_url: str,
            api_params: dict,
            headers: Optional[Dict[str, str]] = None,
            timeout: int = 10,
            retry_times: int = 3,
            retry_delay: float = 0.5,
            backoff_factor: float = 2.0,
            session: Optional[requests.Session] = None
    ) -> Union[requests.Response, None]:
        """POST api_params as JSON to api_url, retrying with exponential
        backoff on network errors.

        Returns the Response on success (any HTTP status), or None once
        all retries are exhausted.
        """
        # Bug fix: the old code created a brand-new Session (assigned to
        # self.session) whenever `session` was None, discarding the token
        # header that _extract_reader had just installed. Reuse instead.
        sess = session if session is not None else self.session
        for attempt in range(retry_times + 1):
            try:
                logger.info(f"调用API: {api_url} (尝试 {attempt + 1}/{retry_times + 1})")
                # Per-call headers are merged on top of the session headers
                # by requests, so the cached token header is preserved.
                # (The old code ignored the `headers` parameter entirely.)
                response = sess.post(
                    url=api_url,
                    json=api_params,  # json= serializes and sets Content-Type
                    headers=headers,
                    timeout=timeout
                )
                return response
            except requests.exceptions.Timeout:
                logger.error(f"请求超时 (尝试 {attempt + 1}/{retry_times + 1})")
            except requests.exceptions.ConnectionError:
                logger.error(f"连接错误 (尝试 {attempt + 1}/{retry_times + 1})")
            except requests.exceptions.RequestException as e:
                logger.error(f"请求异常: {e} (尝试 {attempt + 1}/{retry_times + 1})")
            except Exception as e:
                logger.error(f"未知错误: {e} (尝试 {attempt + 1}/{retry_times + 1})")

            # Back off before the next attempt (skip after the last one).
            if attempt < retry_times:
                delay = retry_delay * (backoff_factor ** attempt)
                logger.info(f"等待 {delay} 秒后重试...")
                time.sleep(delay)

        logger.error(f"API调用失败，已重试 {retry_times} 次")
        return None

    def _generate_token(self, params: Optional[dict] = None) -> str:
        """Obtain an auth token from the auth endpoint.

        The payload is "username;YYYY-MM-DD;password", RSA-encrypted with
        the service's PKCS#1 public key and base64-encoded.

        Raises:
            Exception: wrapping the underlying failure.
        """
        try:
            message = params.get('username') + ";" + datetime.now().strftime("%Y-%m-%d") + ";" + params.get('password')
            public_key = params.get('public_key')
            # rsa.PublicKey.load_pkcs1 expects bytes; the config stores the
            # PEM as a str, so encode it first.
            if isinstance(public_key, str):
                public_key = public_key.encode('utf-8')
            public_key_rsa = rsa.PublicKey.load_pkcs1(public_key)
            crypto = rsa.encrypt(message.encode("utf-8"), public_key_rsa)
            crypto64 = base64.b64encode(crypto).decode('utf-8')
            request_params = {'json': crypto64}
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.3",
            }
            response = self._call_api_with_retry(api_url=params['auth_url'], api_params=request_params, headers=headers)
            token = json.loads(response.text)["token"]
            # NOTE(review): logging the raw token writes a credential into
            # the log file -- consider masking it.
            logger.info(f"成功生成token: {token}")
            return token
        except Exception as e:
            raise Exception(f"获取 token 失败: {e}")

    def _extract_reader(self, page_no: int) -> List[Dict]:
        """Fetch one page of raw records from the data API.

        Caches the auth token on first use so later pages skip the auth
        round-trip (the old code never assigned _cached_token, so a fresh
        token was fetched for every page).

        Raises:
            Exception: if the request fails after retries.
            KeyError: if the response lacks the 'message' field.
        """
        if not self._cached_token:
            self._cached_token = self._generate_token(self.api_config['token_param'])
        token = self._cached_token

        headers = {
            'Content-Type': 'application/json',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        }
        self.session.headers.update({'token': token})
        api_params = self.api_config['api_params'].copy()
        api_params['pageNo'] = page_no
        response = self._call_api_with_retry(
            api_url=self.api_config['api_url'],
            api_params=api_params,
            headers=headers
        )
        if not response:
            raise Exception(f"第 {page_no} 页 API 请求失败")

        data = response.json()
        if 'message' not in data:
            raise KeyError(f"第 {page_no} 页响应缺少 'message' 字段")

        parsed_data = data['message']
        # Drop the trailing element, preserving the original behavior.
        # NOTE(review): this assumes 'message' always carries one extra
        # tail element (e.g. a summary row) -- confirm with the API docs.
        return parsed_data[:-1] if parsed_data else []

    def _fetch_all_stream(self):
        """Generator yielding raw record lists page by page until a short
        or empty page signals the end."""
        current_page = 1
        while True:
            logger.info("="*80 + f"正在请求第 {current_page} 页数据" + "="*80)
            data = self._extract_reader(current_page)
            if not data:
                break
            yield data
            # Bug fix: read pageSize from self.api_config instead of the
            # module-level global `api_config`, which only exists when this
            # file is run as a script.
            if len(data) < self.api_config['api_params']['pageSize']:
                break
            current_page += 1

    def _transform_data(self, raw_data: List[Dict]) -> List[MetricRecord]:
        """Convert raw API dicts into standardized MetricRecord objects.

        All type normalization, null handling and numeric conversion
        happens here; malformed items are logged and skipped.
        """
        transformed = []
        for item in raw_data:
            try:
                if not isinstance(item, dict):
                    logger.warning(f"跳过非字典项: {type(item)} - {item}")
                    continue

                cur_date = item.get('CUR_DATE', '')
                indicator_id = item.get('INDICATOR_ID', '')
                value = item.get('VALUE')

                # Normalize YYYYMMDD -> YYYY-MM-DD; anything else becomes "".
                if cur_date and len(cur_date) == 8:
                    try:
                        formatted_date = datetime.strptime(cur_date, "%Y%m%d").strftime("%Y-%m-%d")
                    except ValueError:
                        formatted_date = ""
                else:
                    formatted_date = ""

                # Coerce VALUE to float; blank/None/unparsable -> 0.0.
                if value is None or (isinstance(value, str) and value.strip() == ""):
                    metrics_value = 0.0
                else:
                    try:
                        metrics_value = float(value)
                    except (ValueError, TypeError):
                        logger.warning(f"VALUE 无法转为 float: {repr(value)} (类型: {type(value).__name__})，设为 0.0")
                        metrics_value = 0.0

                # Single timestamp so create_time and modify_time always match.
                now_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                record = MetricRecord(
                    keys=f"CJ-{indicator_id}{formatted_date}" if formatted_date else f"CJ-{indicator_id}",
                    edb_metrics_code=f"CJ-{indicator_id}",
                    third_code=indicator_id,
                    third_type="CJ",
                    metrics_value=metrics_value,
                    data_time=formatted_date,
                    create_time=now_str,
                    modify_time=now_str,
                    third_create_time=""
                )
                transformed.append(record)

            except Exception as e:
                logger.error(f"💥 _transform_data 转换失败，原始项: {item}, 错误: {e}")
                continue

        return transformed

    def _writer(self, data: List[MetricRecord]) -> bool:
        """Upsert the given records into the configured table inside one
        transaction. Returns True on success (or when there is no data)."""
        if not data:
            logger.warning("没有数据需要导入")
            return True

        table_name = self.db_config['table_name']
        argslist = [record.to_tuple() for record in data]

        self.executor.execute_in_transaction(lambda tx: tx.execute_values(
            table_name,
            sql=f"""
                INSERT INTO {table_name}
                    (keys, edb_metrics_code, third_code, third_type, metrics_value,
                     data_time, create_time, modify_time, third_create_time)
                VALUES %s
                ON CONFLICT (keys)
                DO UPDATE SET
                    edb_metrics_code = EXCLUDED.edb_metrics_code,
                    third_code = EXCLUDED.third_code,
                    third_type = EXCLUDED.third_type,
                    metrics_value = EXCLUDED.metrics_value,
                    data_time = EXCLUDED.data_time,
                    modify_time = CURRENT_TIMESTAMP,
                    third_create_time = EXCLUDED.third_create_time
                """,
            argslist=argslist,
            template="(%s, %s, %s, %s, %s, %s, %s, %s, %s)",
            page_size=self.api_config.get('batch_config', {}).get('batch_size', 3000)
        ))

        # Maintain the running total declared in __init__ (it was never
        # updated before).
        self.total_inserted_rows += len(argslist)
        return True

    def _run_import(self) -> bool:
        """Drive the full import: stream pages, transform, write.

        Returns True if at least one page was written successfully.

        Raises:
            Exception: on the first failed page (processing stops there).
        """
        success_count = 0
        total_pages = 0
        failed_pages = 0
        try:
            for page_data in self._fetch_all_stream():
                total_pages += 1
                try:
                    logger.info(f"第{total_pages}页转换和写入")
                    transformed = self._transform_data(page_data)
                    if self._writer(transformed):
                        success_count += 1
                except Exception as e:
                    failed_pages += 1
                    # Fail fast: a single bad page aborts the whole run.
                    raise Exception(f"第 {total_pages} 页处理失败: {e}")
            if total_pages == 0:
                logger.info("未处理到任何页")
            logger.info(f"完成：{total_pages} 页，成功 {success_count} 页，失败 {failed_pages} 页")
            return success_count > 0
        except Exception as e:
            raise Exception(f"❌ run_import执行失败, 异常是: {e}")

if __name__ == '__main__':
    # NOTE(review): credentials, the RSA public key and endpoints are
    # hard-coded in plain text below -- they should be moved to environment
    # variables or a secrets store before this file circulates further.
    api_config = {
        'batch_config': {
            'batch_size': 1000,  # records per execute_values batch
            'max_workers': 4  # worker thread count (not referenced in this file)
        },
        'api_params': {
            # Incremental window: fetch data changed within the last 100 days.
            'ETL_DATE': (datetime.now() - timedelta(days=100)).strftime("%Y-%m-%d %H:%M:%S"),
            'pageSize': 100,
            'pageNo': 1
        },
        'token_param': {
            'auth_url': 'http://u.95579.com/dataapi/auth',
            'username': "thfund@cjsc.com.cn",
            'password': "dt7mTNwA",
            'public_key': """
                                  -----BEGIN RSA PUBLIC KEY-----
                                  MIGJAoGBAJDhq0rhTN2YeAICdi5Zpl2sqQTCDXGCrCxY9gx1Jb4+etCZkfy/0PZQ
                                  ZHA7TBmrbert4PUZKbpy0qEYnxvWGGmZ/cJ0YYSgue2aPSfkq/QRS/Y58W+j+TcO
                                  lCv+dZ7Wi0k68Yu7wyH+ZbDo55ySwLpiI/o+IQvzOwRPHlQ0yIkVAgMBAAE=
                                  -----END RSA PUBLIC KEY-----
                                  """
        },
        'api_url': 'http://u.95579.com/dataapi/get_t_alp_data_value',
    }

    # Database configuration: one entry per environment, each carrying its
    # own table names and connection parameters.
    db_config = {
        'standalone': {
            'table_config': {
                'table_name': 'bbb_index_center_third_edb_metrics_data',
            },
            'sql_connect': {
                'dbname': 'postgres',
                'user': 'postgres',
                'password': 'postgres',
                'host': '127.0.0.1',
                'port': '5432'
            }
        },
        'test_connect_config': {
            'table_config': {
                'table_name': 'a_bb_lsy_test_index_center_third_edb_metrics_data'
            },
            'sql_connect': {
                'dbname': 'odpstest',
                'user': 'LTAI5tQo9VtJ414iSEZrE8Vn',
                'password': 'BPBKqDiQ7JKPl6o3QDIFg6kh71nakS',
                'host': 'hgprecn-cn-v641lnkxm003-cn-shanghai.hologres.aliyuncs.com',
                'port': '80'
            },
            'application_name': "third_edb_data",
        }
        # ,
        # 'prod_connect_config': {
        #     'table_config': {
        #         'mapping_table': 'cj_tree_mapping',
        #         'tree_table': 'meta_public_data_index_tree',
        #         'metrics_table': 'index_center_third_edb_metrics_main'
        #     },
        #     'dbname': 'dataocean',
        #     'application_name': "third_edb_data",
        #     'user': getParam(user),
        #     'password': getParam(password),
        #     'port': getParam(port)
        # }
    }

    # Select the environment, wire up the pool and importer, then run.
    # DataImporter receives table_config as its db_config (it only reads
    # 'table_name' from it).
    config_env = 'standalone'
    logger.info(f"配置环境选择: {config_env}")
    selected_config = db_config[config_env]
    logger.info("创建数据库连接池")
    executor = PostgreSQLConnectionPool(min_conn=5, max_conn=20, **selected_config['sql_connect'])
    dataImporter = DataImporter(api_config, selected_config['table_config'], executor)
    try:
        dataImporter._run_import()
    except Exception as e:
        raise Exception(f"❌ 执行失败: {e}")
    finally:
        # Always release pool connections, even after a failed import.
        executor.close_all()

