import logging
from datetime import datetime

import pymysql
from scrapy.exceptions import DropItem
from sqlalchemy import create_engine, Column, Integer, String, Float, Text, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# Declarative base shared by every ORM model in this module.
Base = declarative_base()

class ForexTable(Base):
    """
    ORM model for one foreign-exchange quote row (table ``forex_rates``).

    Matches the output format required by the assignment: one row per
    currency holding the four quoted prices plus two timestamps.  The
    ``comment=`` strings are stored as column comments in the database
    schema and are deliberately left in Chinese.
    """
    __tablename__ = 'forex_rates'
    
    # Surrogate primary key; auto-assigned by the database.
    id = Column(Integer, primary_key=True, autoincrement=True)
    currency = Column(String(50), nullable=False, comment='货币名称')  # Currency name
    tbp = Column(Float, comment='现汇买入价')  # Telegraphic Transfer Buying Price
    cbp = Column(Float, comment='现钞买入价')  # Cash Buying Price  
    tsp = Column(Float, comment='现汇卖出价')   # Telegraphic Transfer Selling Price
    csp = Column(Float, comment='现钞卖出价')   # Cash Selling Price
    # Timestamps are kept as strings, matching the scraped item fields.
    publish_time = Column(String(20), comment='发布时间')  # Publish time from the source page
    crawl_time = Column(String(20), comment='爬取时间')  # Time the item was scraped
    
    def __repr__(self):
        return f"<Forex(currency='{self.currency}', tbp={self.tbp}, time='{self.publish_time}')>"

class SQLitePipeline:
    """
    SQLite storage pipeline (recommended — avoids MySQL configuration issues).

    Creates the schema on spider start, writes one ``ForexTable`` row per
    item, and disposes of the engine on spider close.  Items are always
    passed through, even when persisting them fails.
    """
    
    def __init__(self):
        # Engine and session factory are created lazily in open_spider().
        self.engine = None
        self.Session = None
        self.logger = logging.getLogger(__name__)

    def open_spider(self, spider):
        """Connect to SQLite, create tables, and build the session factory.

        Re-raises on failure so the crawl aborts instead of limping on
        with a broken pipeline.
        """
        try:
            self.engine = create_engine('sqlite:///forex_data.db')
            # Idempotent: only creates tables that do not exist yet.
            Base.metadata.create_all(self.engine)
            self.Session = sessionmaker(bind=self.engine)
            spider.logger.info("✅ 成功连接到SQLite数据库")
        except Exception as e:
            spider.logger.error(f"❌ 连接数据库失败: {e}")
            raise

    def close_spider(self, spider):
        """Dispose of the engine's connection pool when the spider closes."""
        if self.engine:
            self.engine.dispose()

    def process_item(self, item, spider):
        """Persist one forex item; always returns the item unchanged.

        Failures are logged and rolled back rather than dropping the item,
        so downstream pipelines still see it.
        """
        # Create the session OUTSIDE the try block: in the original code a
        # failing self.Session() left `session` unbound, so the except and
        # finally clauses crashed with UnboundLocalError instead of
        # surfacing the real error.
        session = self.Session()
        try:
            # Map the scraped item onto a database row.
            forex_record = ForexTable(
                currency=item['currency'],
                tbp=item['tbp'],
                cbp=item['cbp'],
                tsp=item['tsp'],
                csp=item['csp'],
                publish_time=item['publish_time'],
                crawl_time=item['crawl_time']
            )
            session.add(forex_record)
            session.commit()
            spider.logger.info(f"✅ 成功保存外汇数据: {item['currency']}")
        except Exception as e:
            session.rollback()
            spider.logger.error(f"❌ 保存外汇数据失败: {e}")
        finally:
            session.close()
        
        return item

class MySQLPipeline:
    """
    MySQL storage pipeline (fallback option).

    Mirrors ``SQLitePipeline``: builds the schema on spider start, writes
    one ``ForexTable`` row per item, and disposes of the engine on close.
    The connection URI comes from the ``MYSQL_URI`` setting.
    """
    
    def __init__(self, mysql_uri):
        self.mysql_uri = mysql_uri
        self.engine = None
        self.Session = None

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from crawler settings (``MYSQL_URI``)."""
        return cls(
            mysql_uri=crawler.settings.get('MYSQL_URI', 'mysql+pymysql://root:@localhost:3306/forex_db')
        )

    def open_spider(self, spider):
        """Connect to MySQL, create tables, and build the session factory.

        Raises on failure: the original version swallowed the exception,
        leaving ``self.Session`` as None so every later process_item()
        crashed with an opaque TypeError.
        """
        try:
            self.engine = create_engine(self.mysql_uri)
            Base.metadata.create_all(self.engine)
            self.Session = sessionmaker(bind=self.engine)
            spider.logger.info("✅ 成功连接到MySQL数据库")
        except Exception as e:
            spider.logger.error(f"❌ 连接MySQL数据库失败: {e}")
            raise

    def close_spider(self, spider):
        """Dispose of the engine's connection pool (was missing before)."""
        if self.engine:
            self.engine.dispose()

    def process_item(self, item, spider):
        """Persist one forex item to MySQL; always returns the item.

        Implemented to match SQLitePipeline (the original was a stub that
        silently discarded the data).
        """
        # Session is created outside the try so the except/finally clauses
        # never reference an unbound name.
        session = self.Session()
        try:
            session.add(ForexTable(
                currency=item['currency'],
                tbp=item['tbp'],
                cbp=item['cbp'],
                tsp=item['tsp'],
                csp=item['csp'],
                publish_time=item['publish_time'],
                crawl_time=item['crawl_time']
            ))
            session.commit()
            spider.logger.info(f"✅ 成功保存外汇数据: {item['currency']}")
        except Exception as e:
            session.rollback()
            spider.logger.error(f"❌ 保存外汇数据失败: {e}")
        finally:
            session.close()
        
        return item

class ConsolePipeline:
    """
    Debug pipeline that pretty-prints every forex item to the console.

    Purely a pass-through: the item is returned unchanged for the next
    pipeline stage.
    """

    # (display label, item key) pairs, in the order the fields are printed.
    _DISPLAY_FIELDS = (
        ('货币名称', 'currency'),
        ('现汇买入价', 'tbp'),
        ('现钞买入价', 'cbp'),
        ('现汇卖出价', 'tsp'),
        ('现钞卖出价', 'csp'),
        ('发布时间', 'publish_time'),
        ('爬取时间', 'crawl_time'),
    )

    def process_item(self, item, spider):
        """Echo all fields of *item* to stdout and hand it on unchanged."""
        print("\n=== 外汇牌价数据 ===")
        for label, key in self._DISPLAY_FIELDS:
            print(f"{label}: {item[key]}")
        print("=" * 40)
        return item

class ValidationPipeline:
    """
    Validates scraped forex items before they reach the storage pipelines.

    Items missing a required field, or carrying a non-positive price, are
    dropped via ``DropItem``; valid items pass through unchanged.

    NOTE: the original code referenced ``scrapy.exceptions.DropItem``
    without importing scrapy, so every drop attempt raised NameError
    instead — fixed by importing ``DropItem`` at the top of the file.
    """

    # Every item must carry a truthy value for each of these fields.
    REQUIRED_FIELDS = ('currency', 'tbp', 'cbp', 'tsp', 'csp', 'publish_time')

    def process_item(self, item, spider):
        """Return *item* if it is complete and plausible.

        Raises:
            DropItem: when a required field is missing/falsy or a quoted
                price is not strictly positive.
        """
        for field in self.REQUIRED_FIELDS:
            # A missing key and a falsy value (None, '') both count as absent.
            if field not in item or not item[field]:
                spider.logger.warning(f"数据缺少必要字段: {field}")
                raise DropItem(f"缺少字段: {field}")

        # Sanity check: exchange rates must be strictly positive.
        if item['tbp'] <= 0 or item['tsp'] <= 0:
            raise DropItem("价格数据不合理")

        return item