#
from enum import Enum
import inspect
import os

import sys
import time
import logging
import operator
from functools import wraps, reduce
import typing
from playhouse.migrate import SqliteMigrator
from playhouse.pool import PooledSqliteDatabase
from peewee import (
    BigIntegerField,
    CompositeKey,
    IntegerField,
    TextField,
    FloatField,
    DateTimeField,
    Field,
    Model,
    Metadata,
)

from typing import Type, Union
from eolcrawl.spiderutils.datetime_utils import current_timestamp, timestamp_to_date
from eolcrawl.settings import LOG_PATH
from eolcrawl.spiderutils.log_helper import get_mylogger



db_logger = get_mylogger("spider_db_models",level=logging.INFO)

DATABASE_TYPE = "sqlite"
# Database file path; override with the SPIDER_DB_PATH environment variable.
DB_NAME = os.getenv("SPIDER_DB_PATH", ".db/rosbuiler.db")  

peewee_logfile = os.path.join(LOG_PATH, "peewee.log")

# Route peewee's own logger to a dedicated log file.
logger = logging.getLogger("peewee")
logger.setLevel(logging.INFO)
logger.addHandler(logging.FileHandler(peewee_logfile))
# logger.addHandler(logging.StreamHandler())


# Ensure the database file (and its parent directory) exists before pooling.
if not os.path.exists(DB_NAME):
    try:
        db_dir = os.path.dirname(DB_NAME)
        if db_dir:
            # The default path lives under ".db/", which may not exist yet;
            # without this, open() below fails with FileNotFoundError.
            os.makedirs(db_dir, exist_ok=True)
        open(DB_NAME, 'w').close()  # create an empty database file
    except Exception as e:
        db_logger.exception(f"Failed to create database file: {e}")
        raise


def singleton(cls, *args, **kw):
    """Per-process singleton decorator: at most one *cls* instance per PID."""
    cache = {}

    def get_instance():
        # Key by class and PID so forked workers each get their own instance.
        cache_key = f"{cls}{os.getpid()}"
        if cache_key not in cache:
            cache[cache_key] = cls(*args, **kw)
        return cache[cache_key]

    return get_instance


class SqliteDatabaseLock:
    """Advisory lock built on SQLite's ``BEGIN IMMEDIATE`` transaction.

    Usable as a context manager or as a decorator. On a clean exit the
    transaction is committed; if the guarded block raises, the transaction
    is rolled back so a failed critical section does not persist partial
    writes (previously it was committed even on error).
    """

    def __init__(self, lock_name, timeout=10, db=None):
        self.lock_name = lock_name
        self.timeout = int(timeout)
        # Fall back to the module-wide pooled database when none is given.
        self.db = db if db else DB

    def lock(self):
        """Acquire the lock by opening an immediate write transaction."""
        try:
            self.db.execute_sql("BEGIN IMMEDIATE")
            return True
        except Exception as e:
            db_logger.error(f"获取 SQLite 锁失败: {str(e)}")
            raise Exception(f"获取锁 {self.lock_name} 失败: {str(e)}")

    def unlock(self):
        """Release the lock by committing the transaction."""
        try:
            self.db.execute_sql("COMMIT")
            return True
        except Exception as e:
            db_logger.error(f"释放 SQLite 锁失败: {str(e)}")
            raise Exception(f"释放锁 {self.lock_name} 失败: {str(e)}")

    def rollback(self):
        """Release the lock by rolling the transaction back (error path)."""
        try:
            self.db.execute_sql("ROLLBACK")
            return True
        except Exception as e:
            db_logger.error(f"释放 SQLite 锁失败: {str(e)}")
            raise Exception(f"释放锁 {self.lock_name} 失败: {str(e)}")

    def __enter__(self):
        self.lock()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # BUG FIX: commit only on success; roll back when the body raised.
        if exc_type is None:
            self.unlock()
        else:
            self.rollback()

    def __call__(self, func):
        @wraps(func)
        def magic(*args, **kwargs):
            with self:
                return func(*args, **kwargs)

        return magic


class PooledDatabase(Enum):
    """Supported pooled-database backends; only SQLite is currently enabled."""

    # MYSQL = PooledMySQLDatabase
    # POSTGRES = PooledPostgresqlDatabase
    SQLITE = PooledSqliteDatabase


class DatabaseLock(Enum):
    """Lock implementation per backend; only SQLite is currently enabled."""

    # MYSQL = MysqlDatabaseLock
    # POSTGRES = PostgresDatabaseLock
    SQLITE = SqliteDatabaseLock


# Connection-pool configuration passed to the pooled SQLite database.
database_config = {
    "max_connections": 8,  # pool size cap
    "stale_timeout": 300,  # seconds before an idle connection is recycled
    "timeout": 20,  # seconds to wait for a free connection
    "pragmas": {  # SQLite-specific settings
        "journal_mode": "wal",  # WAL mode for better concurrent reads
        "cache_size": -1024 * 32,  # 32MB page cache (negative = KiB units)
    },
}


@singleton
class BaseDataBase:
    """Holds the process-wide pooled database connection (one per PID)."""

    def __init__(self):
        try:
            pool_cls = PooledDatabase[DATABASE_TYPE.upper()].value
            self.database_connection = pool_cls(
                DB_NAME, check_same_thread=False, **database_config
            )
            db_logger.info("数据库连接池初始化成功")
        except Exception as e:
            db_logger.error(f"数据库连接池初始化失败: {str(e)}")
            raise


# Field classes treated as "continuous" (range-queryable) by BaseModel.query.
CONTINUOUS_FIELD_TYPE = {IntegerField, FloatField, DateTimeField}
# Prefixes of the paired "<prefix>_time" / "<prefix>_date" audit columns.
AUTO_DATE_TIMESTAMP_FIELD_PREFIX = {
    "create",
    "start",
    "end",
    "update",
    "read_access",
    "write_access",
}


def is_continuous_field(cls: typing.Type) -> bool:
    """Return True if *cls* is, or inherits from, a continuous field type.

    Walks the class hierarchy recursively, stopping at ``Field``/``object``
    so unrelated base machinery is not traversed.
    """
    if cls in CONTINUOUS_FIELD_TYPE:
        return True
    # A base matches directly, or (if it is not the Field/object boundary)
    # one of its own ancestors matches.
    return any(
        base in CONTINUOUS_FIELD_TYPE
        or (base != Field and base != object and is_continuous_field(base))
        for base in cls.__bases__
    )


def fill_db_model_object(model_object, human_model_dict):
    """Copy dict values onto *model_object* for attributes its class declares."""
    model_cls = model_object.__class__
    for field_name, field_value in human_model_dict.items():
        # Only assign keys that correspond to declared class attributes.
        if hasattr(model_cls, str(field_name)):
            setattr(model_object, str(field_name), field_value)
    return model_object


def remove_field_name_prefix(field_name):
    """Strip the conventional "f_" column prefix when present."""
    if field_name.startswith("f_"):
        return field_name[2:]
    return field_name


def auto_date_timestamp_field():
    """Return the set of auto-maintained "*_time" column names."""
    names = set()
    for prefix in AUTO_DATE_TIMESTAMP_FIELD_PREFIX:
        names.add(f"{prefix}_time")
    return names


def date_string_to_timestamp(time_str, format_string="%Y-%m-%d %H:%M:%S"):
    """Convert a local-time string to an integer millisecond epoch timestamp."""
    parsed = time.strptime(time_str, format_string)
    return int(time.mktime(parsed) * 1000)


class BaseModel(Model):
    """Shared peewee base model with audit timestamps and query helpers.

    Each ``*_time`` column stores an integer timestamp (date strings are
    converted to millisecond epochs by ``date_string_to_timestamp``); the
    matching ``*_date`` column is derived from it in ``_normalize_data``.
    """

    create_time = BigIntegerField(null=True, index=True)
    create_date = DateTimeField(null=True, index=True)
    update_time = BigIntegerField(null=True, index=True)
    update_date = DateTimeField(null=True, index=True)

    def to_json(self):
        # This function is obsolete
        return self.to_dict()

    def to_dict(self):
        """Return the raw column-name -> value mapping backing this row."""
        return self.__dict__["__data__"]

    def to_human_model_dict(self, only_primary_with: list = None):
        """Return row data with the "f_" column prefix stripped.

        When ``only_primary_with`` is given, include only the primary-key
        columns plus the listed (un-prefixed) field names.
        """
        model_dict = self.__dict__["__data__"]

        if not only_primary_with:
            return {remove_field_name_prefix(k): v for k, v in model_dict.items()}

        human_model_dict = {}
        for k in self._meta.primary_key.field_names:
            human_model_dict[remove_field_name_prefix(k)] = model_dict[k]
        for k in only_primary_with:
            human_model_dict[k] = model_dict[f"f_{k}"]
        return human_model_dict

    @property
    def meta(self) -> Metadata:
        """Expose peewee's model metadata object."""
        return self._meta

    @classmethod
    def get_primary_keys_name(cls):
        """Return the primary-key column names as a list (composite-safe)."""
        return (
            cls._meta.primary_key.field_names
            if isinstance(cls._meta.primary_key, CompositeKey)
            else [cls._meta.primary_key.name]
        )

    @classmethod
    def getter_by(cls, attr):
        """Resolve a (possibly dotted) attribute name on the model class."""
        return operator.attrgetter(attr)(cls)

    @classmethod
    def query(cls, reverse=None, order_by=None, **kwargs):
        """Filtered select: each kwarg becomes an equality, range, or IN filter.

        A list/set value on a continuous field of length 2 is treated as an
        inclusive [low, high] range (date strings on the auto-timestamp
        columns are converted first); on any other field it becomes an IN
        filter. ``reverse``/``order_by`` control result ordering. Returns []
        when no usable filter was supplied.
        """
        filters = []
        for f_n, f_v in kwargs.items():
            attr_name = "%s" % f_n
            if not hasattr(cls, attr_name) or f_v is None:
                continue
            if type(f_v) in {list, set}:
                f_v = list(f_v)
                if is_continuous_field(type(getattr(cls, attr_name))):
                    if len(f_v) == 2:
                        for i, v in enumerate(f_v):
                            if (
                                isinstance(v, str)
                                and f_n in auto_date_timestamp_field()
                            ):
                                # time type: %Y-%m-%d %H:%M:%S
                                f_v[i] = date_string_to_timestamp(v)
                        # NOTE(review): the names look swapped — f_v[0] is used
                        # as the lower bound and f_v[1] as the upper bound.
                        lt_value = f_v[0]
                        gt_value = f_v[1]
                        if lt_value is not None and gt_value is not None:
                            filters.append(
                                cls.getter_by(attr_name).between(lt_value, gt_value)
                            )
                        elif lt_value is not None:
                            filters.append(
                                operator.attrgetter(attr_name)(cls) >= lt_value
                            )
                        elif gt_value is not None:
                            filters.append(
                                operator.attrgetter(attr_name)(cls) <= gt_value
                            )
                else:
                    # peewee overloads << as the IN operator on fields.
                    filters.append(operator.attrgetter(attr_name)(cls) << f_v)
            else:
                filters.append(operator.attrgetter(attr_name)(cls) == f_v)
        if filters:
            query_records = cls.select().where(*filters)
            if reverse is not None:
                # Fall back to create_time when order_by is missing/unknown.
                if not order_by or not hasattr(cls, f"{order_by}"):
                    order_by = "create_time"
                if reverse is True:
                    query_records = query_records.order_by(
                        cls.getter_by(f"{order_by}").desc()
                    )
                elif reverse is False:
                    query_records = query_records.order_by(
                        cls.getter_by(f"{order_by}").asc()
                    )
            return [query_record for query_record in query_records]
        else:
            return []

    @classmethod
    def insert(cls, __data=None, **insert):
        """Insert override that stamps ``create_time`` via ``current_timestamp``."""
        # Accept positional lists/tuples by zipping them against field order.
        if isinstance(__data, (list, tuple)):
            __data = dict(zip(cls._meta.fields.keys(), __data))

        if __data:
            __data[cls._meta.combined["create_time"]] = current_timestamp()
        if insert:
            insert["create_time"] = current_timestamp()

        return super().insert(__data, **insert)

    # update and insert will call this method
    @classmethod
    def _normalize_data(cls, data, kwargs):
        """Stamp ``update_time`` and derive each ``*_date`` from its ``*_time``."""
        # Accept positional lists/tuples by zipping them against field order.
        if isinstance(data, (list, tuple)):
            data = dict(zip(cls._meta.fields.keys(), data))

        normalized = super()._normalize_data(data, kwargs)
        if not normalized:
            return {}

        normalized[cls._meta.combined["update_time"]] = current_timestamp()

        for f_n in AUTO_DATE_TIMESTAMP_FIELD_PREFIX:
            if (
                {f"{f_n}_time", f"{f_n}_date"}.issubset(cls._meta.combined.keys())
                and cls._meta.combined[f"{f_n}_time"] in normalized
                and normalized[cls._meta.combined[f"{f_n}_time"]] is not None
            ):
                normalized[cls._meta.combined[f"{f_n}_date"]] = timestamp_to_date(
                    normalized[cls._meta.combined[f"{f_n}_time"]]
                )

        return normalized


# Module-wide pooled connection; the backend's lock class is attached to it
# for convenient access as DB.lock.
DB = BaseDataBase().database_connection
DB.lock = DatabaseLock[DATABASE_TYPE.upper()].value

def close_connection():
    """Best-effort recycling of pooled connections idle for more than 30s."""
    if not DB:
        return
    try:
        DB.close_stale(age=30)
    except Exception as e:
        # Log and swallow: closing stale connections must never crash callers.
        db_logger.exception(e)


class DataBaseModel(BaseModel):
    """Base model bound to the module-wide pooled database."""

    class Meta:
        database = DB


class DatabaseMigrator(Enum):
    """Schema migrator per backend; only SQLite is currently enabled."""

    # MYSQL = MySQLMigrator
    # POSTGRES = PostgresqlMigrator
    SQLITE = SqliteMigrator


@DB.connection_context()
def init_database_tables(alter_fields=None):
    """Create tables for every DataBaseModel subclass in this module, then migrate.

    Args:
        alter_fields: unused; kept for backward compatibility. (The previous
            mutable-default ``[]`` was a Python anti-pattern.)

    Raises:
        Exception: if any table fails to create.
    """
    members = inspect.getmembers(sys.modules[__name__], inspect.isclass)
    table_objs = []
    create_failed_list = []
    for name, obj in members:
        # Skip the abstract base itself; create every concrete subclass.
        if obj is not DataBaseModel and issubclass(obj, DataBaseModel):
            table_objs.append(obj)
            db_logger.info(f"start create table {obj.__name__}")
            try:
                obj.create_table()
                db_logger.info(f"create table success: {obj.__name__}")
            except Exception as e:
                db_logger.exception(e)
                create_failed_list.append(obj.__name__)
    if create_failed_list:
        db_logger.info(f"create tables failed: {create_failed_list}")
        raise Exception(f"create tables failed: {create_failed_list}")

    # Apply in-place schema migrations once all tables exist.
    migrate_db()


@DB.connection_context()
def bulk_insert_into_db(model, data_source, replace_on_conflict=False):
    """Insert rows in batches of 1000, stamping audit columns first.

    Args:
        model: DataBaseModel subclass to insert into.
        data_source: list of row dicts; mutated in place with timestamps.
        replace_on_conflict: when True, update non-create columns on conflict.
    """
    if not data_source:
        # Nothing to insert; also avoids IndexError on data_source[0] below.
        return

    DB.create_tables([model])

    for i, data in enumerate(data_source):
        # Offset by the row index so create_time stays unique/ordered per batch.
        current_time = current_timestamp() + i
        current_date = timestamp_to_date(current_time)
        if "create_time" not in data:
            data["create_time"] = current_time
        data["create_date"] = timestamp_to_date(data["create_time"])
        data["update_time"] = current_time
        data["update_date"] = current_date

    # On conflict, preserve every column except the creation stamps.
    preserve = tuple(data_source[0].keys() - {"create_time", "create_date"})

    batch_size = 1000

    for i in range(0, len(data_source), batch_size):
        with DB.atomic():
            query = model.insert_many(data_source[i : i + batch_size])
            if replace_on_conflict:
                # BUG FIX: PooledDatabase.SQLITE is an Enum member, not a
                # class, so isinstance() against it raised TypeError; compare
                # against the wrapped database class (.value) instead.
                if isinstance(DB, PooledDatabase.SQLITE.value):
                    query = query.on_conflict(preserve=preserve)
                else:
                    query = query.on_conflict(conflict_target="id", preserve=preserve)
            query.execute()


def get_dynamic_db_model(base, job_id):
    # NOTE(review): calling type() on a model *instance* looks suspicious —
    # presumably this should return the dynamically-named model class itself;
    # confirm against base.model's return value and the callers.
    return type(base.model(table_index=get_dynamic_tracking_table_index(job_id=job_id)))


def get_dynamic_tracking_table_index(job_id):
    """Derive a tracking-table suffix from the first 8 characters of the job id."""
    prefix_len = 8
    return job_id[:prefix_len]


# https://docs.peewee-orm.com/en/latest/peewee/query_operators.html
# Maps operator strings accepted by query_dict2expression onto the Python
# operator functions that peewee overloads on Field objects.
supported_operators = {
    "==": operator.eq,
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
    "!=": operator.ne,
    "<<": operator.lshift,
    ">>": operator.rshift,
    "%": operator.mod,
    "**": operator.pow,
    "^": operator.xor,
    # NOTE(review): operator.inv is unary, but query_dict2expression calls
    # every entry with two arguments — "~" would raise TypeError; verify.
    "~": operator.inv,
}


def query_dict2expression(
    model: typing.Type[DataBaseModel],
    query: typing.Dict[str, typing.Union[bool, int, str, list, tuple]],
):
    """Translate a ``{field: value | (op, *args)}`` dict into one peewee expression."""
    clauses = []
    for field_name, spec in query.items():
        # A bare value is shorthand for an equality comparison.
        if not isinstance(spec, (list, tuple)):
            spec = ("==", spec)
        op, *operands = spec

        column = getattr(model, f"f_{field_name}")
        if op in supported_operators:
            clause = supported_operators[op](column, operands[0])
        else:
            # Fall back to a peewee field method, e.g. "in_" or "contains".
            clause = getattr(column, op)(*operands)
        clauses.append(clause)

    # AND all clauses together (raises TypeError for an empty query dict).
    return reduce(operator.iand, clauses)


def query_db(
    model: Type[DataBaseModel],
    limit: int = 0,
    offset: int = 0,
    query: dict = None,
    order_by: Union[str, list, tuple] = None,
):
    """Select rows with optional filtering, ordering, and paging.

    Returns a (rows, total_count) tuple; the count is taken before paging.
    """
    selection = model.select()
    if query:
        selection = selection.where(query_dict2expression(model, query))
    total = selection.count()

    # Normalize the sort spec to a (field, direction) pair; default ascending
    # create_time.
    sort_spec = order_by or "create_time"
    if not isinstance(sort_spec, (list, tuple)):
        sort_spec = (sort_spec, "asc")
    field_name, direction = sort_spec
    sort_field = getattr(model, f"f_{field_name}")
    selection = selection.order_by(getattr(sort_field, direction)())

    if limit > 0:
        selection = selection.limit(limit)
    if offset > 0:
        selection = selection.offset(offset)

    return list(selection), total

class RosLink(DataBaseModel):
    """Crawled ROS download-link record, keyed by (_id, spider_name)."""

    _id = TextField(help_text="ID")
    url = TextField(help_text="URL")  # source URL
    name = TextField(help_text="ros name")  # title / package name
    
    release_date = TextField(null=True,default="",help_text="发布日期")  # release date
    branch = TextField(null=True,default="",help_text="分支")  # branch
    
    spider_name = TextField(help_text="爬虫名称")  # spider name
    status = IntegerField(null=False, default=-1,help_text="状态")  # processing status
    site_name = TextField(null=True,default="",help_text="来源")  # source site

    class Meta:
        db_table = "download_link"
        primary_key = CompositeKey("_id", "spider_name")  # composite primary key


    # title_id = TextField(help_text="标题ID")
    # spider_name = TextField(help_text="爬虫名称")  ##爬虫名称
    # main_type = TextField(help_text="主类型")  ##主类型

class RosRepos(DataBaseModel):
    """Crawled ROS repository record comparing source vs. released artifacts."""

    _id = TextField(help_text="ID")
    url = TextField(help_text="URL")  # URL
    name = TextField(help_text="ros name")  # original name
    ros_pkg_name = TextField(help_text="ros pkg name")  # ROS package name
    branch = TextField(null=True,default="",help_text="分支")  # branch
    src_name = TextField(null=True,default="",help_text="源码名称")  # source name
    src_url = TextField(null=True,default="",help_text="源码URL")  # source URL
    src_version = TextField(null=True,default="",help_text="源码版本")  # source version
    src_size = TextField(null=True,default="",help_text="源码大小")  # source size
    src_release_date = TextField(null=True,default="",help_text="发布日期")  # source release date
    
    src_verifyfile_name = TextField(null=True,default="",help_text="源码校验文件名")  # source checksum file name
    src_verifyfile_url  = TextField(null=True,default="",help_text="源码校验文件URL")  # source checksum file URL
    
    
    src_buildfile_name = TextField(null=True,default="",help_text="源码构建的文件名")  # source build file name
    src_buildfile_url  = TextField(null=True,default="",help_text="源码构建的URL")  # source build file URL
    
    release_src_name = TextField(null=True,default="",help_text="发布源名称")  # released source name
    release_src_url = TextField(null=True,default="",help_text="发布源URL")  # released source URL
    release_pkg_name = TextField(null=True,default="",help_text="发布包名称")  # released package name
    release_pkg_url = TextField(null=True,default="",help_text="发布包URL")  # released package URL
    release_pkg_version = TextField(null=True,default="",help_text="发布包版本")  # released package version
    
    
    # Comparison outcome: "same", or which artifacts disagree (source vs.
    # release package / release source).
    compare_result = TextField(null=True,default="",help_text="比较结果")
    
    
    spider_name = TextField(help_text="爬虫名称")  # spider name
    status = IntegerField(null=False, default=-1,help_text="状态")  # status

    class Meta:
        db_table = "ros_repos"
        # NOTE(review): CompositeKey with a single column — likely meant a
        # plain primary key on _id; confirm before changing.
        primary_key = CompositeKey("_id")



class DownloadLink(DataBaseModel):
    """Placeholder model: its schema is currently disabled (fields commented out)."""

    pass
    # _id = TextField(help_text="ID")
    # url = TextField(help_text="URL")  ##URL
    # spider_name = TextField(help_text="爬虫名称")  ##爬虫名称
    # title = TextField(help_text="标题")  ##标题
    # brief = TextField(null=True,default="",help_text="概要")  ##概要
    # status = IntegerField(null=False, default=-1,help_text="状态")  # 状态
    # release_date = TextField(null=True,default="",help_text="发布日期")  ##发布日期
    # category = TextField(null=True,default="",help_text="信息类别")  ##信息类别
    # site_name = TextField(null=True,default="",help_text="来源")  ##来源
    # father_url = TextField(null=True,default="",help_text="父级URL")  ##父级URL
    # fawenzihao = TextField(null=True,default="",help_text="发文字号")  ##发文字号
    # origin = TextField(null=True,default="",help_text="来源")  ##来源

    # class Meta:
    #     db_table = "download_link"
    #     primary_key = CompositeKey("_id", "spider_name")  # 定义复合主键


# DetailContent model definition
class DetailContent(DataBaseModel):
    """Crawled article/detail-page content, keyed by (title_id, spider_name)."""

    title_id = TextField(help_text="标题ID")  # title ID
    spider_name = TextField(help_text="爬虫名称")  # spider name
    main_type = TextField(help_text="主类型")  # main type
    fawenjigou = TextField(null=True,default="",help_text="发文机构")  # issuing agency
    fawenzihao = TextField(null=True,default="",help_text="发文字号")  # document number
    info_validity = TextField(null=False,default="",help_text="信息有效性")  # info validity
    writing_date = TextField(null=True,default="",help_text="写作日期")  # writing date
    release_date = TextField(null=True,default="",help_text="发布日期")  # release date
    title = TextField(null=False,help_text="标题")  # title
    content = TextField(null=False,help_text="内容")  # body content
    affix_name = TextField(null=True,default="",help_text="附件名称")  # attachment name
    affix_url = TextField(null=True,default="",help_text="附件URL")  # attachment URL
    affix_file_paths = TextField(null=True,default="",help_text="附件路径")  # attachment file paths
    interpretation_name = TextField(null=True,default="",help_text="解读信息名")  # interpretation name
    interpretation_url = TextField(null=True,default="",help_text="解读信息URL")  # interpretation URL
    interpretation_content_title = TextField(null=True,default="",help_text="解读信息内容的标题")  # interpretation content title
    interpretation_content_date = TextField(null=True,default="",help_text="解读信息内容的日期")  # interpretation content date
    interpretation_content_source = TextField(null=True,default="",help_text="解读信息来源")  # interpretation source
    interpretation_content = TextField(null=True,default="",help_text="解读信息内容")  # interpretation content
    href = TextField(null=True,default="",help_text="链接")  # link
    status = IntegerField(null=False, default=-1,help_text="状态")  # status
    site_name = TextField(null=True,default="",help_text="站点名")  # site name
    category = TextField(null=True,default="",help_text="信息类别")  # info category
    source = TextField(null=True,default="",help_text="来源")  # source
    source_url = TextField(null=True,default="",help_text="来源URL")  # source URL
    father_url = TextField(null=True,default="",help_text="父级URL")  # parent URL
    brief = TextField(null=True,default="",help_text="概要")  # brief / summary
    # create_date = TextField(null=False)

    class Meta:
        db_table = "detail_content"  # custom table name
        primary_key = CompositeKey("title_id", "spider_name")  # composite primary key


def column_exists(table_name, column_name):
    """Return True if *table_name* already has a column named *column_name*."""
    cursor = DB.execute_sql(f"PRAGMA table_info({table_name});")
    # row[1] is the column name in PRAGMA table_info output.
    return any(row[1] == column_name for row in cursor.fetchall())


def migrate_db():
    """Apply additive schema migrations inside a single transaction.

    Raises:
        Exception: re-raised after logging so migration failures are not silent.
    """
    from playhouse.migrate import migrate
    with DB.transaction():
        migrator = DatabaseMigrator[DATABASE_TYPE.upper()].value(DB)
        try:
            # Add download_link.branch when migrating from an older schema.
            if not column_exists('download_link', 'branch'):
                migrate(migrator.add_column('download_link', 'branch', TextField(null=True,default="",help_text="分支")))
        except Exception as e:
            db_logger.exception(e)
            DB.rollback()
            raise  # re-raise so failures are not swallowed


if __name__ == "__main__":
    # Manual schema bootstrap: create all tables, then recycle pooled
    # connections before exiting.
    init_database_tables()
    close_connection()

    pass
