import sqlite3
import logging
from itemadapter import ItemAdapter
from scrapy.exceptions import DropItem

class SQLitePipeline:
    """Scrapy pipeline that persists album and track items into a SQLite database.

    Items are routed by the id field they carry: items with a ``track_id`` go
    to the ``tracks`` table, otherwise items with an ``album_id`` go to the
    ``albums`` table. Writes use ``INSERT OR REPLACE`` so re-crawled items
    upsert rather than duplicate.
    """

    def __init__(self, sqlite_db):
        # Path to the SQLite database file (':memory:' also works, e.g. in tests).
        self.sqlite_db = sqlite_db
        self.conn = None
        self.cur = None

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from crawler settings (SQLITE_DB, default 'ximalaya.db')."""
        return cls(
            sqlite_db=crawler.settings.get('SQLITE_DB', 'ximalaya.db')
        )

    def open_spider(self, spider):
        """Open the database connection and ensure the schema exists when the spider starts."""
        self.conn = sqlite3.connect(self.sqlite_db)
        self.cur = self.conn.cursor()
        self.create_tables()

    def close_spider(self, spider):
        """Flush any pending writes and close the database connection when the spider stops."""
        if self.conn:
            # Commit defensively in case a write was left uncommitted.
            self.conn.commit()
            self.conn.close()
            # Drop stale handles so accidental use after close fails loudly.
            self.conn = None
            self.cur = None

    def create_tables(self):
        """Create the albums/tracks tables and their indexes if they do not exist."""
        # Albums table: album_id is the natural key (UNIQUE) used for upserts.
        self.cur.execute('''
            CREATE TABLE IF NOT EXISTS albums (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                album_id TEXT UNIQUE NOT NULL,
                album_title TEXT,
                album_cover TEXT,
                album_description TEXT,
                album_category TEXT,
                album_author TEXT,
                album_play_count INTEGER DEFAULT 0,
                album_track_count INTEGER DEFAULT 0,
                album_subscribe_count INTEGER DEFAULT 0,
                album_share_count INTEGER DEFAULT 0,
                album_url TEXT,
                created_time INTEGER,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        # Tracks table: references the owning album via album_id.
        self.cur.execute('''
            CREATE TABLE IF NOT EXISTS tracks (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                track_id TEXT UNIQUE NOT NULL,
                track_title TEXT,
                track_duration TEXT,
                track_play_count INTEGER DEFAULT 0,
                track_like_count INTEGER DEFAULT 0,
                track_comment_count INTEGER DEFAULT 0,
                track_cover TEXT,
                track_mp3_url TEXT,
                album_id TEXT,
                created_time INTEGER,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (album_id) REFERENCES albums (album_id)
            )
        ''')

        # Indexes for the common lookup patterns.
        self.cur.execute('CREATE INDEX IF NOT EXISTS idx_albums_category ON albums(album_category)')
        self.cur.execute('CREATE INDEX IF NOT EXISTS idx_tracks_album ON tracks(album_id)')
        self.cur.execute('CREATE INDEX IF NOT EXISTS idx_albums_author ON albums(album_author)')

        self.conn.commit()

    def process_item(self, item, spider):
        """Dispatch a scraped item to the matching writer and pass it downstream.

        ``track_id`` is tested first on purpose: track items also carry an
        ``album_id`` foreign key, so testing ``album_id`` first would misroute
        every track into the albums table.
        """
        try:
            if 'track_id' in item and item['track_id']:
                self.save_track(item)
            elif 'album_id' in item and item['album_id']:
                self.save_album(item)
        except Exception as e:
            # Best-effort persistence: undo the broken transaction, log,
            # and keep the item flowing to later pipelines.
            if self.conn:
                self.conn.rollback()
            logging.error(f"保存数据失败: {e}, 数据: {item}")

        return item

    def save_album(self, item):
        """Upsert one album row keyed on album_id."""
        sql = '''
        INSERT OR REPLACE INTO albums (
            album_id, album_title, album_cover, album_description,
            album_category, album_author, album_play_count,
            album_track_count, album_subscribe_count, album_share_count,
            album_url, created_time
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        '''

        values = (
            item.get('album_id'),
            item.get('album_title'),
            item.get('album_cover'),
            item.get('album_description'),
            item.get('album_category'),
            item.get('album_author'),
            item.get('album_play_count', 0),
            item.get('album_track_count', 0),
            item.get('album_subscribe_count', 0),
            item.get('album_share_count', 0),
            item.get('album_url'),
            item.get('created_time')
        )

        self.cur.execute(sql, values)
        self.conn.commit()
        logging.info(f"保存专辑: {item.get('album_title')}")

    def save_track(self, item):
        """Upsert one track row keyed on track_id."""
        sql = '''
        INSERT OR REPLACE INTO tracks (
            track_id, track_title, track_duration, track_play_count,
            track_like_count, track_comment_count, track_cover,
            track_mp3_url, album_id, created_time
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        '''

        values = (
            item.get('track_id'),
            item.get('track_title'),
            item.get('track_duration'),
            item.get('track_play_count', 0),
            item.get('track_like_count', 0),
            item.get('track_comment_count', 0),
            item.get('track_cover'),
            item.get('track_mp3_url'),
            item.get('album_id'),
            item.get('created_time')
        )

        self.cur.execute(sql, values)
        self.conn.commit()
        # Plain module-level logging, consistent with save_album. The original
        # walked inspect.stack() hunting for a caller's 'spider' local, which
        # was fragile and silently did nothing when no such frame existed.
        logging.info(f"保存音频: {item.get('track_title')}")