# -*- coding: utf-8 -*-
import sqlite3
import os
import logging
import threading
import json
from contextlib import contextmanager
from datetime import datetime, timedelta
from typing import Optional, Dict, Any, List, Generator
from functools import lru_cache

# Logging setup: timestamped INFO-level messages for the whole module.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
)
logger = logging.getLogger(__name__)


class DatabaseManager:
    """Thread-safe singleton wrapper around a SQLite database.

    Provides a small connection pool, an LRU-cached record lookup
    (``get_record_by_url``), a key/value cache table with TTL expiry,
    and basic maintenance helpers.
    """

    _instance = None                # the single shared instance
    _lock = threading.Lock()        # guards singleton creation
    _pool_lock = threading.Lock()   # guards connection-pool state

    def __new__(cls, db_path: str = 'data.db'):
        """Create or return the singleton (double-checked locking)."""
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super().__new__(cls)
                    cls._instance.__initialized = False
        return cls._instance

    def __init__(self, db_path: str = 'data.db'):
        # __init__ runs on every DatabaseManager(...) call; initialize once.
        # NOTE: db_path passed on later calls is ignored -- singleton design.
        if self.__initialized:
            return

        self.__initialized = True
        self.db_path = db_path
        self._connection_pool: List[sqlite3.Connection] = []
        self._max_pool_size = 5
        # Count of currently open connections (pooled + borrowed).
        self._active_connections = 0

        # PRAGMA tuning parameters; must exist before _init_db() connects.
        self.cache_size = 2000  # page-cache size in KiB (~2MB)
        self.page_size = 4096   # page size in bytes (4KB)

        # FIX: bind the LRU cache per instance instead of decorating the
        # method at class level -- a class-level lru_cache keys on `self`
        # and keeps the instance alive forever (flake8-bugbear B019).
        # The public interface, including get_record_by_url.cache_clear(),
        # is unchanged.
        self.get_record_by_url = lru_cache(maxsize=1000)(self._get_record_by_url)

        self._init_db()

    def _init_db(self):
        """Create the schema (idempotent) and apply PRAGMA configuration."""
        with self._get_connection() as conn:
            try:
                conn.execute('''
                    CREATE TABLE IF NOT EXISTS scrape_records (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        url TEXT UNIQUE,
                        platform TEXT,
                        likes INTEGER DEFAULT 0,
                        comments INTEGER DEFAULT 0,
                        shares INTEGER DEFAULT 0,
                        status TEXT CHECK(status IN ('正常', '无法抓取', '失效', '已删除')),
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                    )
                ''')

                conn.execute('''
                    CREATE TABLE IF NOT EXISTS system_logs (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        log_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        log_type VARCHAR(20),
                        log_content TEXT
                    )
                ''')

                conn.execute('''
                    CREATE TABLE IF NOT EXISTS cache (
                        key TEXT PRIMARY KEY,
                        value BLOB,
                        expires_at TIMESTAMP
                    )
                ''')

                self._configure_db(conn)
                conn.commit()

            except sqlite3.Error as e:
                logger.error(f"初始化数据库失败: {str(e)}")
                raise

    def _configure_db(self, conn: sqlite3.Connection):
        """Apply per-connection PRAGMA settings."""
        # FIX: page_size must be issued before the journal mode switches to
        # WAL; it only takes effect on a database that has no pages yet.
        conn.execute(f'PRAGMA page_size={self.page_size}')
        conn.execute('PRAGMA journal_mode=WAL')
        # A negative cache_size means "size in KiB" rather than in pages.
        conn.execute(f'PRAGMA cache_size=-{self.cache_size}')
        conn.execute('PRAGMA synchronous=NORMAL')

    @contextmanager
    def _get_connection(self) -> Generator[sqlite3.Connection, None, None]:
        """Borrow a connection from the pool, returning it on success.

        On an exception from the managed block the connection is closed
        (its transaction state is unknown) and the exception propagates.
        """
        conn: Optional[sqlite3.Connection] = None
        with self._pool_lock:
            if self._connection_pool:
                conn = self._connection_pool.pop()
        if conn is None:
            conn = self._create_new_connection()
            # FIX: the original only counted connections created while the
            # pool had spare capacity, yet decremented for every close, so
            # the bookkeeping drifted. Count every open and every close.
            with self._pool_lock:
                self._active_connections += 1
        try:
            yield conn
        except Exception as e:
            logger.error(f"获取连接失败: {str(e)}")
            conn.close()
            with self._pool_lock:
                self._active_connections -= 1
            raise
        else:
            with self._pool_lock:
                if len(self._connection_pool) < self._max_pool_size:
                    self._connection_pool.append(conn)
                    conn = None  # ownership transferred to the pool
            if conn is not None:
                # Pool is full: close the surplus connection.
                conn.close()
                with self._pool_lock:
                    self._active_connections -= 1

    def _create_new_connection(self) -> sqlite3.Connection:
        """Open and configure a new SQLite connection."""
        try:
            conn = sqlite3.connect(
                self.db_path,
                check_same_thread=False,  # connections are shared via the pool
                timeout=10
            )
            # FIX: use sqlite3.Row so callers can build dicts from rows;
            # with the default tuple rows, dict(result) in the URL lookup
            # raised TypeError.
            conn.row_factory = sqlite3.Row
            self._configure_db(conn)
            return conn
        except sqlite3.Error as e:
            logger.error(f"创建数据库连接失败: {str(e)}")
            raise

    def _get_record_by_url(self, url: str) -> Optional[Dict]:
        """Uncached lookup of a scrape record by URL.

        Public callers go through ``self.get_record_by_url``, the LRU-cached
        wrapper bound in __init__. Returns a column->value dict, or None
        when the URL is unknown or the query fails.
        """
        with self._get_connection() as conn:
            try:
                cursor = conn.execute('''
                    SELECT * FROM scrape_records
                    WHERE url = ?
                ''', (url,))
                row = cursor.fetchone()
                return dict(row) if row else None
            except sqlite3.Error as e:
                logger.error(f"查询失败: {str(e)}")
                return None

    def insert_record(self, data: Dict) -> bool:
        """Insert or replace one scrape record.

        ``data`` must contain 'url', 'platform' and 'status'; the counter
        fields default to 0. Returns True on success, False otherwise.
        """
        required_fields = {'url', 'platform', 'status'}
        if not required_fields.issubset(data.keys()):
            logger.error("缺少必要字段")
            return False

        try:
            with self._get_connection() as conn:
                conn.execute('''
                    INSERT OR REPLACE INTO scrape_records
                    (url, platform, likes, comments, shares, status, updated_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                ''', (
                    data['url'],
                    data['platform'],
                    data.get('likes', 0),
                    data.get('comments', 0),
                    data.get('shares', 0),
                    data['status'],
                    datetime.now().isoformat()
                ))
                conn.commit()
                # The cached lookup may now be stale.
                self.get_record_by_url.cache_clear()
                return True

        except sqlite3.IntegrityError as e:
            logger.error(f"唯一性约束冲突: {str(e)}")
            return False
        except sqlite3.Error as e:
            logger.error(f"数据库操作失败: {str(e)}")
            return False

    def batch_insert(self, records: List[Dict]) -> int:
        """Insert/replace many records; return how many succeeded.

        Individual bad records are skipped rather than aborting the batch.
        """
        success_count = 0
        try:
            with self._get_connection() as conn:
                for data in records:
                    try:
                        conn.execute('''
                            INSERT OR REPLACE INTO scrape_records
                            (url, platform, likes, comments, shares, status, updated_at)
                            VALUES (?, ?, ?, ?, ?, ?, ?)
                        ''', (
                            data['url'],
                            data.get('platform'),
                            data.get('likes', 0),
                            data.get('comments', 0),
                            data.get('shares', 0),
                            data.get('status', '正常'),
                            datetime.now().isoformat()
                        ))
                        success_count += 1
                    # FIX: a record without 'url' raised KeyError out of the
                    # whole batch; treat it like any other bad record.
                    except (sqlite3.Error, KeyError):
                        continue
                conn.commit()
                self.get_record_by_url.cache_clear()
        except sqlite3.Error as e:
            logger.error(f"批量插入失败: {str(e)}")
        return success_count

    def insert_log(self, log_type: str, content: str) -> bool:
        """Append one row to system_logs; return True on success."""
        try:
            with self._get_connection() as conn:
                conn.execute('''
                    INSERT INTO system_logs (log_type, log_content)
                    VALUES (?, ?)
                ''', (log_type, content))
                conn.commit()
                return True
        except sqlite3.Error as e:
            logger.error(f"日志插入失败: {str(e)}")
            return False

    def set_cache(self, key: str, value: Any, ttl: int = 3600) -> bool:
        """Store a JSON-serializable value under ``key`` for ``ttl`` seconds."""
        # FIX: store the expiry as an ISO-8601 string instead of a raw
        # datetime -- the implicit sqlite3 datetime adapter is deprecated
        # since Python 3.12, and ISO strings compare correctly as TEXT.
        expires_at = (datetime.now() + timedelta(seconds=ttl)).isoformat()
        try:
            with self._get_connection() as conn:
                conn.execute('''
                    INSERT OR REPLACE INTO cache
                    (key, value, expires_at)
                    VALUES (?, ?, ?)
                ''', (
                    key,
                    json.dumps(value).encode('utf-8'),
                    expires_at
                ))
                conn.commit()
                return True
        except sqlite3.Error as e:
            logger.error(f"缓存设置失败: {str(e)}")
            return False

    def get_cache(self, key: str) -> Optional[Any]:
        """Return the cached value for ``key``, or None if absent/expired."""
        try:
            with self._get_connection() as conn:
                cursor = conn.execute('''
                    SELECT value FROM cache
                    WHERE key = ? AND expires_at > ?
                ''', (key, datetime.now().isoformat()))
                result = cursor.fetchone()
                if result:
                    return json.loads(result[0])
                return None
        except sqlite3.Error as e:
            logger.error(f"缓存获取失败: {str(e)}")
            return None

    def vacuum(self):
        """Run VACUUM to compact the database file."""
        try:
            with self._get_connection() as conn:
                conn.execute('VACUUM')
                logger.info("数据库优化完成")
        except sqlite3.Error as e:
            logger.error(f"数据库优化失败: {str(e)}")

    def close_all(self):
        """Force-close every pooled connection and clear the query cache."""
        with self._pool_lock:
            while self._connection_pool:
                conn = self._connection_pool.pop()
                try:
                    conn.close()
                    self._active_connections -= 1
                except sqlite3.Error:  # FIX: was a bare except
                    pass
            logger.info("数据库连接池已强制清空")
        # Guard against a half-initialized instance (e.g. __init__ raised
        # before the cached wrapper was bound).
        cached_lookup = getattr(self, 'get_record_by_url', None)
        if cached_lookup is not None:
            cached_lookup.cache_clear()

    def __del__(self):
        """Best-effort cleanup; must never raise during interpreter teardown."""
        try:
            self.close_all()
        except Exception:
            pass


# Example usage
if __name__ == '__main__':
    manager = DatabaseManager()

    # Insert one sample scrape record.
    sample = {
        'url': 'https://example.com',
        'platform': '微博',
        'likes': 1000,
        'comments': 200,
        'shares': 300,
        'status': '正常',
    }
    manager.insert_record(sample)

    # Read it back through the cached lookup.
    print("查询结果:", manager.get_record_by_url('https://example.com'))

    # Exercise the key/value cache with a short TTL.
    manager.set_cache('test_key', {'data': [1, 2, 3]}, ttl=60)
    print("缓存内容:", manager.get_cache('test_key'))

    # Finally, run database maintenance.
    manager.vacuum()