# # pipelines.py
import mysql.connector
from eastmoney_scraper.items import StockItem
import logging
class StockPipeline:
    """Scrapy item pipeline that batches scraped stock rows into MySQL.

    Items are buffered in memory and written with a single ``executemany``
    once ``batch_size`` rows have accumulated; any remainder is flushed
    when the spider closes.
    """

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        self.items_buffer = []
        # Number of rows per INSERT batch. Defined here (not in open_spider)
        # so the attribute always exists before process_item can run.
        self.batch_size = 100
        # Set in open_spider; kept as None so close_spider can tell whether
        # a connection was ever established.
        self.connection = None
        self.cursor = None

    def open_spider(self, spider):
        """Open the MySQL connection and cursor when the spider starts.

        NOTE(review): credentials are hard-coded; they should be moved to
        Scrapy settings or environment variables.
        """
        self.connection = mysql.connector.connect(
            host='192.168.220.131',
            database='eastmoney',
            user='root',
            password='123456'
        )
        self.cursor = self.connection.cursor()

    def close_spider(self, spider):
        """Flush any remaining buffered items, then release DB resources."""
        if self.items_buffer:
            self._insert_items_batch(self.items_buffer)
            self.items_buffer = []
        # Guard: open_spider may have failed before the cursor/connection
        # were created, so close only what actually exists.
        if self.cursor is not None:
            self.cursor.close()
        if self.connection is not None:
            self.connection.close()

    def process_item(self, item, spider):
        """Buffer one item; flush a batch to MySQL when the buffer is full.

        NOTE(review): relies on the item's field *insertion order* matching
        the column order of the INSERT statement — verify StockItem declares
        its fields in exactly that order.
        """
        self.items_buffer.append(tuple(item.values()))
        if len(self.items_buffer) >= self.batch_size:
            self._insert_items_batch(self.items_buffer)
            self.items_buffer = []
        return item

    def _insert_items_batch(self, items):
        """Insert *items* (sequences of 16 values each) in one round trip.

        Commits on success; on any DB error, logs the full traceback and
        rolls back. Failed rows are dropped — this preserves the original
        best-effort behavior so one bad batch does not kill the spider.
        """
        insert_query = """
            INSERT INTO stock_data (
                code, name, related_links, latest_price, price_change_percentage,
                price_change_amount, volume_hands, turnover_amount, amplitude,
                highest, lowest, opening_price, previous_close_price,
                turnover_rate, dynamic_pe_ratio, pb_ratio
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        try:
            self.cursor.executemany(insert_query, items)
            self.connection.commit()
            # Lazy %-args: no string formatting unless the record is emitted.
            self.logger.info("Successfully inserted %d records.", len(items))
        except Exception:
            # logger.exception keeps the traceback, unlike the bare message.
            self.logger.exception("Failed to insert batch of %d records", len(items))
            self.connection.rollback()