from datetime import datetime
from itemadapter import ItemAdapter
from scrapy.exceptions import DropItem
from scrapy.utils.serialize import ScrapyJSONEncoder

from web_crawler.items import FundHoldingItem
from web_crawler.utils import MySQLPool


class FundholdingPipeline:
    """Scrapy pipeline that persists ``FundHoldingItem`` rows into MySQL.

    Items are buffered in memory and written with a single multi-row
    ``INSERT ... ON DUPLICATE KEY UPDATE`` once ``batch_size`` items have
    accumulated; any remainder is flushed when the spider closes.
    """

    def __init__(self):
        # Spider logger is injected in open_spider(); None until then.
        self.logger = None
        # Number of buffered items that triggers a bulk INSERT.
        self.batch_size = 2000
        # Connection pool; one connection is borrowed per table-create/flush.
        self.pool = MySQLPool()
        # Rows (dicts keyed by column name) awaiting the next bulk INSERT.
        self.batch_buffer = []

    def open_spider(self, spider):
        """Bind the spider's logger and ensure the target table exists."""
        self.logger = spider.logger
        self._create_table()
        self.logger.info("基金持股Pipeline初始化完成")

    def close_spider(self, spider):
        """Flush any remaining buffered rows before shutdown."""
        if self.batch_buffer:
            self._flush_batch()
        self.logger.info("基金持股Pipeline关闭")

    def _create_table(self):
        """Create the ``fund_holding`` table if it does not already exist.

        Raises:
            Exception: re-raises whatever the DB driver raised so the
                spider fails fast at startup.  (Previously this raised
                ``DropItem``, which Scrapy only interprets inside
                ``process_item`` — at open time it just obscured the
                original error and traceback.)
        """
        self.logger.info("确保基金持股数据表存在")
        conn = self.pool.get_conn()
        try:
            with conn.cursor() as cursor:
                cursor.execute(
                    """
                        CREATE TABLE IF NOT EXISTS fund_holding (
                            id BIGINT AUTO_INCREMENT PRIMARY KEY,
                            org_name_or_fund_name VARCHAR(255) NOT NULL,
                            symbol VARCHAR(20) NOT NULL,
                            report_date DATE NOT NULL,
                            held_num INT,
                            to_float_shares_ratio DECIMAL(10,6),
                            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                            UNIQUE KEY uk_fund_holding (org_name_or_fund_name, symbol, report_date),
                            INDEX idx_symbol (symbol),
                            INDEX idx_report_date (report_date),
                            INDEX idx_org_name (org_name_or_fund_name(100))
                        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
                        COMMENT='基金持股数据表'
                    """
                )
                conn.commit()
                self.logger.info("基金持股数据表验证/创建完成")
        except Exception as e:
            self.logger.error(f"创建表失败: {e}")
            # Fail the spider startup with the real DB error, not a DropItem.
            raise
        finally:
            conn.close()

    def process_item(self, item, spider):
        """Validate a ``FundHoldingItem`` and add it to the batch buffer.

        Non-``FundHoldingItem`` items pass through untouched so other
        pipelines can handle them.

        Raises:
            DropItem: if any of the unique-key fields is missing/empty.
        """
        if not isinstance(item, FundHoldingItem):
            return item
        adapter = ItemAdapter(item)
        # The unique key (org, symbol, date) must be present, else the row
        # cannot be deduplicated server-side.
        required_fields = ['org_name_or_fund_name', 'symbol', 'report_date']
        for field in required_fields:
            if not adapter.get(field):
                raise DropItem(f"缺少必要字段: {field}")

        # Queue the row for the next bulk INSERT.
        self.batch_buffer.append({
            'org_name_or_fund_name': adapter.get('org_name_or_fund_name'),
            'symbol': adapter.get('symbol'),
            'report_date': adapter.get('report_date'),
            'held_num': adapter.get('held_num'),
            'to_float_shares_ratio': adapter.get('to_float_shares_ratio'),
        })

        # Flush as soon as the buffer reaches the configured batch size.
        if len(self.batch_buffer) >= self.batch_size:
            self._flush_batch()

        return item

    def _flush_batch(self):
        """Bulk-insert the buffered rows into MySQL.

        Uses ``ON DUPLICATE KEY UPDATE`` so re-crawled (org, symbol, date)
        rows update in place.  Failures are logged and the batch is
        discarded (best-effort: one bad batch must not kill the crawl).
        """
        if not self.batch_buffer:
            return

        conn = None
        try:
            conn = self.pool.get_conn()
            with conn.cursor() as cursor:
                sql = """
                    INSERT INTO fund_holding 
                    (org_name_or_fund_name, symbol, report_date, held_num, to_float_shares_ratio)
                    VALUES (%(org_name_or_fund_name)s, %(symbol)s, %(report_date)s, %(held_num)s, %(to_float_shares_ratio)s)
                    ON DUPLICATE KEY UPDATE
                    held_num = VALUES(held_num),
                    to_float_shares_ratio = VALUES(to_float_shares_ratio),
                    updated_at = CURRENT_TIMESTAMP
                """
                cursor.executemany(sql, self.batch_buffer)
                conn.commit()
                self.logger.info(f"批量插入 {len(self.batch_buffer)} 条基金持股数据")

        except Exception as e:
            if conn:
                conn.rollback()
            # Deliberate best-effort: log and drop this batch rather than
            # re-raise and abort the whole crawl.
            self.logger.error(f"批量插入失败: {e}")
        finally:
            if conn:
                conn.close()
            # Always clear the buffer so a poisoned batch is not retried
            # forever; on failure its rows are lost (see error log above).
            self.batch_buffer = []
