# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


from urllib.parse import quote_plus

import pandas as pd
import pymysql
from pymysql import IntegrityError
from sqlalchemy import create_engine

# useful for handling different item types with a single interface
from itemadapter import ItemAdapter

class ForexScraperPipeline:
    """Scrapy pipeline that stores scraped forex quotes in MySQL and
    exports the accumulated table to CSV when the spider closes.

    Lifecycle: ``open_spider`` connects and creates the table,
    ``process_item`` inserts one row per item, ``close_spider`` exports
    the table and releases all database resources.
    """

    # NOTE(review): credentials are hard-coded; move them to Scrapy
    # settings (spider.settings) or environment variables in production.
    DB_HOST = 'localhost'
    DB_USER = 'root'
    DB_PASSWORD = 'Wlj98192188?'
    DB_NAME = 'forex_data'

    def open_spider(self, spider):
        """Open the DB connection and engine, and create the target table."""
        # Connect to the database (direct pymysql connection for inserts).
        self.conn = pymysql.connect(
            host=self.DB_HOST,
            user=self.DB_USER,
            password=self.DB_PASSWORD,
            db=self.DB_NAME,
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor
        )
        self.cursor = self.conn.cursor()

        # Build the SQLAlchemy engine (used by pandas for the CSV export).
        # BUG FIX: the password contains '?', which a URL parser treats as
        # the start of the query string, so it must be percent-encoded.
        self.engine = create_engine(
            f'mysql+pymysql://{self.DB_USER}:{quote_plus(self.DB_PASSWORD)}'
            f'@{self.DB_HOST}/{self.DB_NAME}'
        )

        # Create the table if it does not exist yet.
        self.cursor.execute("""
            CREATE TABLE IF NOT EXISTS forex_data (
                id INT AUTO_INCREMENT PRIMARY KEY,
                currency VARCHAR(255) NOT NULL,
                tbp FLOAT DEFAULT NULL,
                cbp FLOAT DEFAULT NULL,
                tsp FLOAT DEFAULT NULL,
                csp FLOAT DEFAULT NULL,
                time TIME DEFAULT NULL
            )
        """)
        self.conn.commit()

    @staticmethod
    def _normalize(value):
        """Map missing/empty scraped values to NULL, keeping numeric 0.

        BUG FIX: the previous ``value or None`` expression also discarded
        a valid 0/0.0 quote, silently storing it as NULL.
        """
        if value is None or value == '':
            return None
        return value

    def process_item(self, item, spider):
        """Insert one scraped quote; items without a currency are skipped."""
        # Guard clause: currency is the only mandatory field.
        if not item.get('currency'):
            return item

        try:
            self.cursor.execute("""
                INSERT INTO forex_data (currency, tbp, cbp, tsp, csp, time) 
                VALUES (%s, %s, %s, %s, %s, %s)
            """, (
                item.get('currency'),
                self._normalize(item.get('tbp')),
                self._normalize(item.get('cbp')),
                self._normalize(item.get('tsp')),
                self._normalize(item.get('csp')),
                self._normalize(item.get('time'))
            ))
            self.conn.commit()
        except IntegrityError as e:
            # Roll back the failed statement so the connection stays usable.
            self.conn.rollback()
            spider.logger.error(f"Database integrity error: {e}")
        except Exception as e:
            self.conn.rollback()
            spider.logger.error(f"Failed to insert item: {e}")

        return item

    def close_spider(self, spider):
        """Export the accumulated table to CSV, then release DB resources."""
        try:
            query = "SELECT currency, tbp, cbp, tsp, csp, time FROM forex_data"
            data = pd.read_sql(query, self.engine)
            # utf-8-sig BOM so Excel opens the CSV with correct encoding.
            data.to_csv('forex_data_export.csv', index=False, encoding='utf-8-sig')
            spider.logger.info("Data successfully exported to forex_data_export.csv")
        except Exception as e:
            spider.logger.error(f"Failed to export data: {e}")
        finally:
            # Close cursor/connection AND dispose the engine's connection
            # pool (the engine was previously leaked).
            self.cursor.close()
            self.conn.close()
            self.engine.dispose()
