import asyncio
import logging
import threading
from collections import namedtuple
from os import path
from typing import Any, Callable

from pydantic import PrivateAttr
from pydantic_settings import BaseSettings

from utils.ip import IpUtil

logger = logging.getLogger(__name__)

# Project root: resolves `<this file>/../..` and takes its dirname, i.e. the
# directory three levels above this file (backend project root).
ROOT_PATH = path.dirname(path.abspath(path.join(__file__, '../..')))

# Verbose log format; %(name)s is the name passed to logging.getLogger().
standard_format = '[%(asctime)s][%(threadName)s:%(thread)d][log_name:%(name)s][%(filename)s:%(lineno)d]' \
                  '[%(levelname)s]: %(message)s'

# Compact log format for console output.
simple_format = '%(levelname)s[%(asctime)s][%(filename)s:%(lineno)d]%(message)s'

# Fixed: the opening '[' before %(asctime)s was missing, leaving an
# unmatched ']' in every formatted line.
test_format = '[%(asctime)s] %(message)s'

# Immutable record describing an MQTT broker connection.
MQTTConfig = namedtuple('MQTTConfig', ['broker', 'port', 'client_id', 'username', 'password'])


class BaseConfig(BaseSettings):
    """Application settings.

    The ``server`` directory is the backend project root.  Create a ``.env``
    file in that directory; environment variables written to it (upper-case
    by convention) are loaded automatically, e.g. a ``.env`` containing::

        ENV=dev  # dev / prod / test supported; defaults to prod when unset
                 # or when no .env file exists
    """
    # --- Runtime-mutation state -------------------------------------------
    # Pydantic *private* attributes.  They MUST be declared with PrivateAttr:
    # the original code referenced self._lock / self._async_lock /
    # self._on_change_callbacks without ever initialising them, so set(),
    # aset() and register_on_change() failed at first use.
    _lock: threading.Lock = PrivateAttr(default_factory=threading.Lock)
    _async_lock: asyncio.Lock = PrivateAttr(default_factory=asyncio.Lock)
    _on_change_callbacks: list = PrivateAttr(default_factory=list)

    # Upload directory.  Must not be 'upload/' or 'upload/images': the upload
    # endpoint is 'upload/images' and the paths would collide.
    upload_directory: str = path.join(ROOT_PATH, 'uploads')
    # Maximum image upload size (10 MB).
    upload_image_size: int = 1024 * 1024 * 10
    # Maximum video upload size (30 MB).
    upload_video_size: int = 1024 * 1024 * 30
    # Maximum generic file upload size (300 MB).
    upload_file_size: int = 1024 * 1024 * 300
    # Allowed image extensions.
    upload_image_ext: set = {'png', 'jpg', 'jpeg', 'gif', 'ico', 'bmp'}
    # Allowed video extensions.
    upload_video_ext: set = {'mp4', 'mp3', 'avi', 'flv', 'rmvb', 'mov'}
    # URL prefix for uploaded files.
    upload_prefix: str = '/routers/uploads'
    # File storage engine.
    storage_engine: str = "local"
    # Storage-engine specific configuration.
    storage_config: dict = {}

    # Data source URL.
    database_url: str = 'mysql+asyncmy://root:root@localhost:3306/meeting-minutes-assistant?charset=utf8mb4'
    # Minimum database connection-pool size.
    database_pool_min_size: int = 5
    # Maximum database connection-pool size.
    database_pool_max_size: int = 500
    # Pool recycle time (seconds): a connection that has lived longer than
    # this is discarded and re-created before its next use, protecting
    # against stale connections (DB server restarts, network interruptions).
    database_pool_recycle: int = 1800
    # Echo executed SQL statements to the log; best disabled in production.
    database_echo: bool = True

    # Whether static resources are served.
    enabled_static: bool = True
    # URL path for static resources.
    static_path: str = '/api/static'
    # Local directory for static resources.
    static_directory: str = path.join(ROOT_PATH, 'static')

    # CORS: JSON-encoded list of allowed origins,
    # e.g. '["*"]' or '["http://localhost", "https://www.example.org"]'
    cors_allow_origins: str = '["*"]'

    # Run mode.
    mode: str = 'dev'  # dev, prod

    # --- Global settings ---------------------------------------------------
    # Version.
    version: str = 'v1.3.5'
    # Project root path.
    root_path: str = ROOT_PATH
    # Default request timeout (seconds).
    request_timeout: int = 60
    # MySQL table prefix.
    table_prefix: str = 'la_'
    # Time zone.
    timezone: str = 'Asia/Shanghai'
    # Datetime format.
    datetime_fmt: str = '%Y-%m-%d %H:%M:%S'
    # System secret string.
    # NOTE(review): secret hard-coded in source — should come from .env.
    secret: str = 'UVTIyzCy'

    # Redis data source URL.
    redis_url: str = 'redis://localhost:6379'
    # Redis key prefix.
    redis_prefix: str = 'meetings:'
    # SMS verification-code key prefix.
    redis_sms_code: str = 'smsCode:'

    # Disallow modifying operations (rejects POST requests).
    disallow_modify: bool = False
    port: int = 8801
    # Current domain, used when building file URLs; when files are not stored
    # locally the storage engine's own domain is used instead.  Defaults to
    # the server IP.  NOTE: evaluated once at class-creation time, so an
    # environment override of `port` is not reflected here.
    domain: str = f'http://{IpUtil.get_host_ip()}:{port}'
    storage_map_cache: dict = {}

    # NOTE(review): real API credentials hard-coded in source — move them to
    # .env / a secrets manager and rotate the keys.
    baidu_ai: dict = {
        'QIANFAN_ACCESS_KEY': 'ERiiQIMgxkdh4t7MpyuHqPz5',
        'QIANFAN_SECRET_KEY': 'SibczfUF5FQoGjkqwZcKlFkd0nxef6KW',
        'model': 'ERNIE-Speed-128K',
    }

    # --- Logging ------------------------------------------------------------
    # logging.config.dictConfig configuration dictionary.
    LOGGING_DIC: dict = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'standard': {
                'format': standard_format
            },
            'simple': {
                'format': simple_format
            },
            'test': {
                'format': test_format
            },
        },
        'filters': {},  # log filters
        'handlers': {
            # Console handler.
            'console': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',  # print to screen
                'formatter': 'simple'
            },
            # File handler collecting INFO and above.
            'file': {
                'level': 'INFO',
                'class': 'logging.handlers.RotatingFileHandler',  # write to file
                'formatter': 'standard',
                'filename': './logs/info.log',  # log file path
                'maxBytes': 1024 * 1024 * 10,  # rotate at 10 MB
                'backupCount': 5,
                'encoding': 'utf-8',  # avoids mojibake for non-ASCII logs
            },
            'socket_file': {
                'level': 'DEBUG',
                'class': 'logging.handlers.RotatingFileHandler',  # write to file
                'formatter': 'standard',
                'filename': './logs/socket.log',
                # Fixed: maxBytes/backupCount were missing, so unlike every
                # other handler here this file never rotated (maxBytes=0).
                'maxBytes': 1024 * 1024 * 10,
                'backupCount': 5,
                'encoding': 'utf-8',
            },
            'mqtt_file': {
                'level': 'INFO',
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'standard',
                'filename': './logs/mqtt.log',
                'maxBytes': 1024 * 1024 * 10,
                'backupCount': 5,
                'encoding': 'utf-8',
            },
            'scheduler_file': {
                'level': 'INFO',
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'standard',
                'filename': './logs/scheduler.log',
                'maxBytes': 1024 * 1024 * 10,
                'backupCount': 5,
                'encoding': 'utf-8',
            }
        },
        'loggers': {
            # Config for logging.getLogger(__name__): the empty-string key is
            # the root/catch-all logger, used whenever no named logger matches.
            '': {
                'handlers': ['file', 'console'],  # both: write to file AND print to screen
                'level': 'INFO',
                'propagate': True,  # propagate upward to higher-level loggers
            },
            'socket': {
                'handlers': ['socket_file'],
                'level': 'DEBUG',
                'propagate': False,
            },
            'mqtt': {
                'handlers': ['mqtt_file'],
                'level': 'DEBUG',
                'propagate': False,
            },
            'scheduler': {
                'handlers': ['scheduler_file'],
                'level': 'DEBUG',
                'propagate': False,
            },
        },
    }

    # ===== Runtime update API (sync) =====
    def set(self, key: str, value: Any) -> None:
        """Set attribute ``key`` synchronously and fire change callbacks.

        Callbacks are invoked as ``callback(key, old, new)``; exceptions they
        raise are logged and swallowed so one bad callback cannot break the
        update or stop the remaining callbacks.
        """
        with self._lock:
            old = getattr(self, key, None)
            setattr(self, key, value)
            # Iterate a snapshot so callbacks may (de)register safely.
            for cb in list(self._on_change_callbacks):
                try:
                    cb(key, old, value)
                except Exception:
                    logger.exception("on_change callback failed for %s", key)

    # ===== Runtime update API (async) =====
    async def aset(self, key: str, value: Any) -> None:
        """Async variant of :meth:`set`; awaits coroutine callbacks."""
        async with self._async_lock:
            old = getattr(self, key, None)
            setattr(self, key, value)
            for cb in list(self._on_change_callbacks):
                # Callbacks may be sync or async.
                try:
                    res = cb(key, old, value)
                    if asyncio.iscoroutine(res):
                        await res
                except Exception:
                    logger.exception("on_change callback failed for %s", key)

    def register_on_change(self, callback: Callable[[str, Any, Any], None]) -> None:
        """Register a configuration-change callback: ``callback(key, old, new)``."""
        self._on_change_callbacks.append(callback)

    async def refresh_from_db(self, loader_callable: Callable[..., Any], *loader_args, **loader_kwargs) -> None:
        """Load configuration from the DB and write it onto this settings object.

        ``loader_callable`` is an async callable returning ``dict[key, value]``
        (pass a DB session through ``loader_args``/``loader_kwargs`` if it
        needs one).  Each pair is applied via :meth:`aset` so the async lock
        and change callbacks apply.  Failures are logged, never raised.
        """
        try:
            runtime_map = await loader_callable(*loader_args, **loader_kwargs)
            if not isinstance(runtime_map, dict):
                logger.warning("加载程序没有返回dict；不刷新")
                return
            # Apply via aset to use the async lock and trigger callbacks.
            for k, v in runtime_map.items():
                await self.aset(k, v)
            logger.info("设置：从DB中刷新 %d 键", len(runtime_map))
        except Exception:
            logger.exception("设置。从数据库刷新失败")
