import pymysql
from itemadapter import ItemAdapter
from beautifultable import BeautifulTable
import logging


class DemoPipeline:
    """Scrapy item pipeline that persists stock items into a MySQL ``stocks``
    table and pretty-prints all stored rows when the spider closes.

    The pipeline degrades gracefully: if the database connection cannot be
    established, items pass through unchanged and errors are only logged.
    """

    def __init__(self):
        # Both stay None until open_spider connects successfully; every
        # DB-touching method checks them before use.
        self.db = None
        self.cursor = None

    def open_spider(self, spider):
        """Open the MySQL connection and ensure the target table exists."""
        try:
            # TODO(review): credentials are hard-coded; prefer Scrapy
            # settings (via from_crawler) or environment variables.
            self.db = pymysql.connect(
                host='localhost',
                user='root',
                password='826922',
                database='stocks_db',
                charset='utf8mb4',
                port=3306
            )
            self.cursor = self.db.cursor()
            spider.logger.info("MySQL数据库连接成功")

            # Only create the table once the connection is known to work.
            self.create_table(spider)

        except Exception as e:
            spider.logger.error(f"MySQL连接失败: {e}")
            # Reset both handles so later methods see a consistent
            # "not connected" state.
            self.db = None
            self.cursor = None

    def create_table(self, spider):
        """Create the ``stocks`` table if it does not already exist."""
        try:
            self.cursor.execute('''
                CREATE TABLE IF NOT EXISTS stocks (
                    stock_code VARCHAR(20) PRIMARY KEY,
                    stock_name VARCHAR(50),
                    last_price DECIMAL(10,2),
                    change_percent VARCHAR(30),
                    change_amount DECIMAL(10,2),
                    volume VARCHAR(30),
                    amplitude VARCHAR(30),
                    high_price DECIMAL(10,2),
                    low_price DECIMAL(10,2),
                    open_price DECIMAL(10,2),
                    close_price DECIMAL(10,2)
                )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
            ''')
            spider.logger.info("数据表 stocks 创建成功或已存在")
        except Exception as e:
            spider.logger.error(f"创建表失败: {e}")

    def process_item(self, item, spider):
        """Upsert one stock item into the database and return it unchanged.

        Uses ``REPLACE INTO`` so re-crawled stocks overwrite their previous
        row (stock_code is the primary key). Parameterized placeholders keep
        the query safe from SQL injection.
        """
        if not self.db or not self.cursor:
            spider.logger.error("数据库未连接，跳过数据插入")
            return item

        adapter = ItemAdapter(item)
        try:
            self.cursor.execute('''
                REPLACE INTO stocks (
                    stock_code, stock_name, last_price, change_percent,
                    change_amount, volume, amplitude, high_price,
                    low_price, open_price, close_price
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            ''', (
                adapter.get('stock_code'),
                adapter.get('stock_name'),
                adapter.get('last_price'),
                adapter.get('change_percent'),
                adapter.get('change_amount'),
                adapter.get('volume'),
                adapter.get('amplitude'),
                adapter.get('high_price'),
                adapter.get('low_price'),
                adapter.get('open_price'),
                adapter.get('close_price')
            ))
            self.db.commit()
            spider.logger.debug(f"数据插入成功: {adapter.get('stock_code')}")
        except Exception as e:
            # rollback can itself raise (e.g. connection dropped); don't let
            # that mask the original error or abort the whole crawl.
            try:
                self.db.rollback()
            except Exception:
                pass
            spider.logger.error(f"数据插入失败: {e}")
        return item

    def read_data_from_database(self, spider):
        """Read all stored stocks and return them as a BeautifulTable.

        Returns None when the database is unavailable, empty, or the query
        fails. Rows are ranked by change_percent, highest first.
        """
        if not self.db or not self.cursor:
            spider.logger.error("数据库未连接，无法读取数据")
            return None

        try:
            # change_percent is stored as VARCHAR, so a plain ORDER BY would
            # sort lexicographically ("9.5" above "10.2"). Cast to a number
            # for the intended numeric ranking; MySQL's CAST tolerates a
            # trailing '%' in the stored text (with a warning, not an error).
            self.cursor.execute('''
                SELECT stock_code, stock_name, last_price, change_percent,
                       change_amount, volume, amplitude, high_price,
                       low_price, open_price, close_price
                FROM stocks ORDER BY CAST(change_percent AS DECIMAL(10,4)) DESC
            ''')
            stocks_data = self.cursor.fetchall()

            if not stocks_data:
                spider.logger.info("数据库中没有股票数据")
                return None

            db_table = BeautifulTable()
            db_table.columns.header = [
                "股票代码", "股票名称", "最新价", "涨跌幅(%)",
                "涨跌额", "成交量(手)", "振幅(%)", "最高价",
                "最低价", "开盘价", "昨收"
            ]
            db_table.rows.alignment = BeautifulTable.ALIGN_RIGHT
            db_table.columns.width = [10, 12, 10, 12, 10, 12, 12, 10, 10, 10, 10]
            db_table.columns.padding = 1

            for stock in stocks_data:
                db_table.rows.append(stock)

            spider.logger.info(f"从数据库成功读取 {len(stocks_data)} 条股票记录")
            return db_table

        except Exception as e:
            spider.logger.error(f"从数据库读取数据失败: {e}")
            return None

    def close_spider(self, spider):
        """Print a summary table of all stored rows, then release resources."""
        db_table = self.read_data_from_database(spider)
        if db_table:
            print("\n===== MySQL数据库中的所有股票数据 =====")
            print(db_table)
            total_count = len(db_table.rows)
            print(f"\n数据库中共有 {total_count} 条股票记录")

        # Guard cursor and connection independently: a partially-failed
        # open_spider could leave db set but cursor None (or vice versa),
        # and closing must never raise during shutdown.
        if self.cursor:
            self.cursor.close()
        if self.db:
            self.db.close()
            spider.logger.info("MySQL数据库连接已关闭")