from datetime import datetime
from itemadapter import ItemAdapter
from scrapy.exceptions import DropItem
from scrapy.utils.serialize import ScrapyJSONEncoder
from web_crawler.utils import MySQLPool




class XueqiuStockPipeline:
    """Scrapy pipeline that persists Xueqiu stock-list items into MySQL.

    The target table is created eagerly at construction time, and each item
    is upserted against the ``(symbol, market)`` unique key, so re-crawling
    the same stock updates the existing row instead of duplicating it.
    """

    def __init__(self):
        # The pool is shared across all pipeline calls; creating the table
        # here (rather than lazily) guarantees process_item never races a
        # missing table.
        self.pool = MySQLPool()
        self.create_table()

    def open_spider(self, spider):
        """Called automatically by Scrapy when the spider starts."""
        spider.logger.info("Pipeline初始化完成")

    def close_spider(self, spider):
        """Called automatically by Scrapy on shutdown; releases the pool."""
        self.pool.close()

    def create_table(self):
        """Create the ``xueqiu_stocks`` table if it does not already exist.

        Raises:
            Exception: the original database error is re-raised unchanged.
                (``DropItem`` is deliberately NOT used here: it is only
                meaningful when raised from ``process_item``; raised during
                ``__init__`` Scrapy would not handle it, and wrapping would
                discard the real traceback.)
        """
        conn = self.pool.get_conn()
        try:
            with conn.cursor() as cursor:
                cursor.execute("""
                    CREATE TABLE IF NOT EXISTS xueqiu_stocks (
                        id INT AUTO_INCREMENT PRIMARY KEY,
                        symbol VARCHAR(20) NOT NULL COMMENT '股票代码',
                        name VARCHAR(100) COMMENT '股票名称',
                        current DECIMAL(10,2) COMMENT '当前价格',
                        percent DECIMAL(10,4) COMMENT '涨跌幅',
                        page INT COMMENT '采集页码',
                        market VARCHAR(10) COMMENT '市场类型',
                        timestamp DATETIME COMMENT '采集时间',
                        UNIQUE KEY uk_symbol_market (symbol, market),
                        INDEX idx_symbol (symbol),
                        INDEX idx_market (market),
                        INDEX idx_timestamp (timestamp)
                    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
                    COMMENT='雪球股票数据表'
                """)
            conn.commit()
        except Exception:
            # conn is always bound here (acquired before the try), so no
            # None-guard is needed.
            conn.rollback()
            raise
        finally:
            conn.close()

    def process_item(self, item, spider):
        """Validate, default-fill and upsert one stock item into MySQL.

        Args:
            item: the scraped item; must carry a non-empty ``symbol``.
            spider: the running spider, used only for its logger.

        Returns:
            The (possibly defaulted) item, for downstream pipelines.

        Raises:
            DropItem: when ``symbol`` is missing or the database write fails.
        """
        adapter = ItemAdapter(item)

        # An item without a symbol cannot be keyed and is useless downstream.
        if not adapter.get('symbol'):
            raise DropItem("股票数据缺少symbol字段")

        # Fill defaults only where the spider did not supply a value.
        adapter.setdefault('timestamp', datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        adapter.setdefault('market', 'CN')
        adapter.setdefault('page', 1)

        conn = None
        try:
            conn = self.pool.get_conn()
            with conn.cursor() as cursor:
                # Upsert keyed on uk_symbol_market: re-crawled stocks update
                # their price/name fields in place.
                sql = """
                            INSERT INTO xueqiu_stocks (
                                symbol, name, current, percent, page, market, timestamp
                            ) VALUES (
                                %s, %s, %s, %s, %s, %s, %s
                            )
                            ON DUPLICATE KEY UPDATE
                                name = VALUES(name),
                                current = VALUES(current),
                                percent = VALUES(percent),
                                page = VALUES(page),
                                timestamp = VALUES(timestamp)
                        """
                cursor.execute(sql, (
                    adapter['symbol'],
                    adapter.get('name'),
                    adapter.get('current'),
                    adapter.get('percent'),
                    adapter['page'],
                    adapter['market'],
                    adapter['timestamp']
                ))
                conn.commit()
                spider.logger.debug(f"成功保存/更新股票: {adapter['symbol']}")
                return item

        except Exception as e:
            # conn may still be None if get_conn() itself failed.
            if conn:
                conn.rollback()
            spider.logger.error(f"保存股票数据失败: {e}, 数据: {dict(adapter)}")
            raise DropItem(f"数据库错误: {e}")
        finally:
            if conn:
                conn.close()