import pymysql
from scrapy.exceptions import NotConfigured


class MyspiderPipeline:
    """Scrapy pipeline that buffers scraped items and bulk-inserts them into MySQL.

    Items are queued in ``items_buffer`` and flushed with a single
    ``executemany`` once ``buffer_size`` rows have accumulated (and once more
    when the spider closes), which is far cheaper than one INSERT per item.
    """

    def __init__(self, crawler):
        print("数据库建立连接")
        # Connection parameters. They can be overridden via Scrapy settings
        # (MYSQL_HOST, MYSQL_USER, ...); the original hard-coded values are
        # kept as backward-compatible defaults.
        settings = getattr(crawler, "settings", {})
        self.db_config = {
            "host": settings.get("MYSQL_HOST", "localhost"),
            "user": settings.get("MYSQL_USER", "root"),
            "password": settings.get("MYSQL_PASSWORD", "123456"),
            "database": settings.get("MYSQL_DATABASE", "mytest"),
            "port": int(settings.get("MYSQL_PORT", 3306)),
            "charset": "utf8mb4",  # utf8mb4 covers the full Unicode range (emoji etc.)
            "cursorclass": pymysql.cursors.DictCursor,
        }
        self.connect = None
        self.cursor = None
        self.items_buffer = []   # rows waiting to be flushed in one batch
        self.buffer_size = 1000  # flush threshold for the bulk insert
        self.create_connection()
        self.crawler = crawler
        self.spider = None  # captured lazily on the first process_item call

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the pipeline from the running crawler."""
        return cls(crawler)

    def create_connection(self):
        """(Re)open the MySQL connection and its cursor.

        Raises:
            pymysql.Error: if the server cannot be reached.
        """
        self.connect = pymysql.connect(**self.db_config)
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        """Queue one item for batched insertion and pass it downstream.

        The item is converted to a parameter tuple matching the INSERT
        statement in ``insert_batch``; missing fields become NULL via
        ``item.get``.
        """
        # Remember the spider so error handling can close it later.
        if self.spider is None:
            self.spider = spider

        self.items_buffer.append((
            item.get("code"),
            item.get("name"),
            item.get("reads"),
            item.get("review"),
            item.get("title"),
            item.get("content"),
            item.get("author"),
            item.get("postdate"),
            item.get("reply")
        ))

        # Flush as soon as the buffer reaches the batch size.
        if len(self.items_buffer) >= self.buffer_size:
            self.insert_batch()

        return item

    def insert_batch(self):
        """Flush the buffer to MySQL in a single transaction.

        On failure the transaction is rolled back. If the failure was caused
        by a dropped connection, we reconnect and KEEP the buffered rows so
        the next flush can retry them; if the connection is still open the
        error is in the SQL/data itself (retrying the same rows would fail
        forever), so the spider is stopped. A reconnect attempt that also
        fails stops the spider as well.
        """
        if not self.items_buffer:
            return

        sql = """
        INSERT INTO stockreview 
        (代码, 名称, 阅读数, 评论数, 标题, 内容, 作者, 发布时间, 回复号) 
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
        """

        try:
            # Explicit transaction around the whole batch.
            self.connect.begin()
            self.cursor.executemany(sql, self.items_buffer)
            self.connect.commit()

            print(f"成功批量插入 {len(self.items_buffer)} 条数据")
            self.items_buffer = []  # clear only after a successful commit
        except pymysql.Error as e:
            print(f"批量插入出错: {e}")
            # rollback itself raises if the connection is already gone —
            # guard it so the recovery logic below always runs.
            try:
                self.connect.rollback()
            except pymysql.Error:
                pass

            if self.connect is None or not self.connect.open:
                # Connection dropped: try to reconnect, keeping the buffer
                # so the rows are retried on the next flush.
                try:
                    self.create_connection()
                except pymysql.Error:
                    print("数据库连接异常，无法恢复，停止爬虫运行")
                    if self.spider:
                        self.crawler.engine.close_spider(self.spider, 'db_connection_error')
            else:
                # Connection is fine, so the statement/data is bad and a
                # retry would loop forever — stop the crawl.
                print("数据库连接异常，无法恢复，停止爬虫运行")
                if self.spider:
                    self.crawler.engine.close_spider(self.spider, 'db_connection_error')

    def close_spider(self, spider):
        """Scrapy teardown hook: flush remaining rows and release DB resources."""
        if self.items_buffer:
            self.insert_batch()

        if self.cursor:
            self.cursor.close()
        if self.connect:
            self.connect.close()
