import os
import re
import time
from collections import defaultdict
from dataclasses import InitVar, dataclass, field
from enum import Enum
from typing import Dict, Union
from urllib import parse

from kombu import Exchange, Queue

from utils.crypto import Crypt

from .load_config import BASE_DIR, load

# Date stamp resolved once at import time; used for log file names.
date = time.strftime("%Y-%m-%d", time.localtime())

# Routes accessible without logging in.
ALLOW_NO_LOGIN = (
    "/ping",
    "/v1/api/image_code",
    "/static/<path:filename>",
    "/v1/api/login",
    "/v1/organizer/webhook/receive_async",
    "/v1/organizer/vm/operate"
)

# Route whitelist: every no-login route plus the logout endpoint.
# Bug fix: union (|) instead of symmetric difference (^). With ^, any route
# that ever appeared in BOTH operands would silently drop out of the
# whitelist; | expresses the actual intent ("add logout to the set").
RULE_WHITE_LIST = set(ALLOW_NO_LOGIN) | {"/v1/api/logout"}


class FlaskEnv(Enum):
    """Allowed values of the FLASK_CONFIG environment variable."""

    development = "development"
    test = "test"
    production = "production"
    local = "local"

    @classmethod
    def check_range(cls, env):
        """Return True when *env* is the name of a member of this enum."""
        return env in cls.__members__


@dataclass
class BaseConfig:
    """Base configuration shared by the Flask app and the Celery worker.

    Raw settings are read from the project config file via ``load()`` into
    ``_CONFIG`` (a plain dict) and copied onto instance attributes in
    ``__post_init__``.
    """

    # Bind address and port for the HTTP server.
    HOST: str = "0.0.0.0"
    PORT: str = "5000"

    DEBUG: bool = True
    TESTING: bool = False
    WTF_CSRF_ENABLED: bool = False
    # Init-only argument: environment name, validated against FlaskEnv.
    ENV: InitVar[str] = FlaskEnv.development.name

    # Login session lifetime, seconds (7 days).
    EXPIRATION: int = 3600 * 24 * 7
    ITEMS_PER_PAGE: int = 10

    # Default scenario (cyber range) end offset, seconds (2 days).
    RANGE_EXPIRES: int = 3600 * 24 * 2
    # Image captcha lifetime in Redis, seconds.
    IMAGE_CODE_REDIS_EXPIRES: int = 60
    # User session lifetime in Redis, seconds.
    USER_SESSION_REDIS_EXPIRES: int = EXPIRATION

    redis_config: dict = field(default_factory=dict)
    REDIS_URL: str = field(default="redis://localhost:6379/0")
    courseware_db: int = 10

    # Raw config dict loaded from file; consumed and discarded by subclasses.
    _CONFIG: Union[Dict, None] = field(default_factory=load)

    # Whether to enable user access-policy checks.
    # Deliberately unannotated: a plain class attribute, not a dataclass field.
    USER_POLICY_FILTER = False

    def __post_init__(self, ENV):
        """Validate ENV and copy base settings from the loaded config dict.

        Example of mutating frozen attributes (kept from the original):
        object.__setattr__(self, 'debug', _CONFIG.DEBUG)
        self.__dict__.update({
            "secret_key": _CONFIG.SECRET_KEY,
        })
        """
        if not FlaskEnv.check_range(ENV):
            raise ValueError(f"不允许的环境变量：{ENV}")
        # Config value is the baseline; production/test override it below.
        self.DEBUG = self._CONFIG["DEBUG"]
        if ENV == "production":
            self.DEBUG = False
        elif ENV == "test":
            self.DEBUG = True
            self.TESTING = True
            self.WTF_CSRF_ENABLED = True

        # Export the resolved flag so Flask's runner picks it up.
        os.environ["FLASK_DEBUG"] = f"{int(self.DEBUG)}"
        self.ENV = ENV

        self.HOST = self._CONFIG["HOST"]
        self.PORT = self._CONFIG["PORT"]

        # Build the Redis URL only when both host and port are configured;
        # otherwise the class-level default REDIS_URL remains in effect.
        self.redis_config = self._CONFIG["redis"]
        redis_host = self.redis_config["host"]
        redis_port = self.redis_config["port"]
        if redis_host and redis_port:
            user = self.redis_config.get("user", "")
            password = self.redis_config.get("password", "")
            if password and self._CONFIG["PW_IS_ENCRYPTED"]:
                # Best-effort decryption: keep the raw value on failure.
                try:
                    password = Crypt.decrypt(password)
                except Exception:  # noqa
                    pass
                self.redis_config["password"] = password
            self.courseware_db = self.redis_config.get("courseware_db", 0)
            # Percent-encode the password so special characters survive the URL.
            self.REDIS_URL = f"redis://{user}:{parse.quote(password)}@{redis_host}:{redis_port}/{self.courseware_db}"  # noqa


@dataclass
class Config(BaseConfig):
    """Flask application configuration: MySQL, OpenStack, network, logging."""

    secret_key: str = "secret_key"

    WEB_IP: str = "172.16.3.203"
    # Cache key for the image preset flag.
    IMAGE_PRESET_FLAG: str = "IMAGE_PRESET_FLAG"
    # Cache key for the cyber-range PLC status.
    CHECK_RANGE_PLC_STATUS_KEY: str = "CHECK_RANGE_PLC_STATUS"

    # Log file locations and rotation policy.
    LOG_PATH: str = os.path.join(BASE_DIR, "logs")
    LOG_PATH_ERROR: str = os.path.join(LOG_PATH, f"error_{date}.log")
    LOG_PATH_INFO: str = os.path.join(LOG_PATH, f"info_{date}.log")
    LOG_FILE_MAX_BYTES: int = 100 * 1024 * 1024
    LOG_FILE_BACKUP_COUNT: int = 10

    # File upload/download folder; request size capped at 64 MB.
    UPLOAD_FOLDER: str = os.path.join(BASE_DIR, "path")
    MAX_CONTENT_LENGTH: int = 64 * 1024 * 1024

    # Deliberately unannotated: plain class attribute, not a dataclass field.
    DATABASE = "virtualdb"
    SQLALCHEMY_DATABASE_URI: str = field(
        default="mysql://root:123456@127.0.0.1:3306/virtualdb"
    )

    # Enable SQLAlchemy's automatic modification tracking.
    SQLALCHEMY_TRACK_MODIFICATIONS: bool = True
    JSON_SORT_KEYS: bool = False
    SQLALCHEMY_ECHO: bool = False
    SQLALCHEMY_COMMIT_ON_TEARDOWN: bool = True
    # Connection pooling and keep-alive settings. Deliberately unannotated:
    # a mutable dataclass-field default would be rejected by @dataclass.
    SQLALCHEMY_ENGINE_OPTIONS = {
        "pool_size": 32,
        "pool_recycle": 200,
        "pool_pre_ping": True,
    }
    # Record queries slower than SLOW_DB_QUERY_TIME seconds.
    SQLALCHEMY_RECORD_QUERIES: bool = True
    SLOW_DB_QUERY_TIME: float = 0.5

    ELASTICSEARCH_URL: str = field(default="http://127.0.0.1:6666")

    # First key of USER_TO_OPENSTACK; populated in __post_init__.
    project_id: Union[int, None] = None

    OPENSTACK: dict = field(default_factory=dict)
    ADMIN_OPENSTACK: dict = field(default_factory=dict)
    USER_TO_OPENSTACK: dict = field(default_factory=dict)
    RANGE_CONF: dict = field(default_factory=dict)
    STORAGE: dict = field(default_factory=dict)
    NETWORK: dict = field(default_factory=dict)

    node_list: dict = field(default_factory=dict)
    log: dict = field(default_factory=dict)

    def __post_init__(self, ENV):
        """Copy app-level settings (secret, MySQL, OpenStack, ...) from config."""
        super().__post_init__(ENV)

        self.secret_key = self._CONFIG["SECRET_KEY"]

        mysql_config = self._CONFIG["mysql"]
        self.DATABASE = mysql_config["MYSQL_DATABASE"]
        self.SQLALCHEMY_ECHO = mysql_config.get("SQLALCHEMY_ECHO", False)

        # Decrypt the DB password only when the config marks it as encrypted.
        passwd = (
            Crypt.decrypt(mysql_config["MYSQL_PASSWORD"])
            if self._CONFIG["PW_IS_ENCRYPTED"]
            else mysql_config["MYSQL_PASSWORD"]
        )
        # Percent-encode the password so special characters survive the URI.
        self.SQLALCHEMY_DATABASE_URI = (
            f'mysql://{mysql_config["MYSQL_USER"]}:'
            f"{parse.quote(passwd)}@"
            f'{mysql_config["MYSQL_HOST"]}:{mysql_config["MYSQL_PORT"]}/'
            f'{mysql_config["MYSQL_DATABASE"]}'
        )
        # NOTE(review): required key — raises KeyError if missing; confirm the
        # config loader guarantees "SQLALCHEMY_ENGINE_OPTIONS" is present.
        self.SQLALCHEMY_ENGINE_OPTIONS = mysql_config["SQLALCHEMY_ENGINE_OPTIONS"]

        self.OPENSTACK = self._CONFIG["OPENSTACK"]
        self.ADMIN_OPENSTACK = self._CONFIG.get("ADMIN_OPENSTACK", defaultdict(dict))
        self.USER_TO_OPENSTACK = self._CONFIG["USER_TO_OPENSTACK"]
        self.ZSTACK_CLOUD = self._CONFIG["ZSTACK_CLOUD"]
        # First configured project id — assumes USER_TO_OPENSTACK is non-empty;
        # raises StopIteration otherwise. TODO confirm with the config loader.
        self.project_id = next(iter(self.USER_TO_OPENSTACK))
        self.RANGE_CONF = self._CONFIG["Range"]
        self.NETWORK = self._CONFIG["NETWORK"]
        # Fall back to a default CIDR pool when none is configured.
        if not self.NETWORK.get("CIDRS", None):
            self.NETWORK["CIDRS"] = [
                "192.168.1.0/24",
                "192.168.2.0/24",
                "192.168.3.0/24",
                "192.168.4.0/24",
                "10.0.0.0/24",
            ]

        self.log = self._CONFIG["log"]

        self.WEB_IP = self._CONFIG["WEB_IP"] or self.WEB_IP

        # Release the raw config dict; it is no longer needed after init.
        self._CONFIG = None


@dataclass
class CeleryConfig(BaseConfig):
    """Celery worker configuration (lowercase "new-style" Celery settings).

    Reads the ``celery`` section of the loaded config in ``__post_init__``.
    Broker and result backends reuse the Redis connection built by
    ``BaseConfig`` with their own database numbers.
    """

    celery_name: str = BASE_DIR.name

    broker_url: str = field(default="redis://localhost:6379/0")
    result_backend: str = field(default="redis://localhost:6379/0")

    # Task queues (set of kombu.Queue); built in __post_init__.
    task_queues: Union[set, None] = None
    # Automatic task routing table; built in __post_init__.
    task_routes: Union[dict, None] = None

    # Reliability: ack late and re-queue tasks lost with a crashed worker.
    task_acks_late: bool = True
    task_reject_on_worker_lost: bool = True
    task_acks_on_failure_or_timeout: bool = True

    # Result retention, seconds (1 day).
    result_expires: int = 60 * 60 * 24
    # Recycle worker children periodically to mitigate memory leaks.
    worker_max_tasks_per_child: int = 200
    # Recycle a worker child once it exceeds this resident memory (KiB).
    worker_max_memory_per_child: int = 800000

    # Prefetch one task at a time per worker process.
    worker_prefetch_multiplier: int = 1

    # Payload compression.
    task_compression: str = "gzip"
    result_compression: str = "gzip"

    # Timeouts. NOTE(review): the default soft limit (300 s) exceeds the hard
    # limit (120 s), so the soft timeout can never fire unless overridden in
    # the config file — confirm whether these defaults should be swapped.
    task_time_limit: int = 60 * 2
    task_soft_time_limit: int = 5 * 60
    # Transport options, e.g. visibility timeout for the Redis broker.
    broker_transport_options: dict = field(default_factory=dict)

    # Broker connection pool limit (Celery default is 10).
    broker_pool_limit: int = 36

    # Serialization. Deliberately unannotated: annotating would turn this into
    # a dataclass field with a mutable default, which @dataclass rejects.
    accept_content = ["json", "pickle"]
    task_serializer: str = "pickle"
    result_serializer: str = "pickle"

    # Timezone used for schedules and timestamps.
    timezone: str = "Asia/Shanghai"

    # DEBUG switches: run tasks synchronously / re-raise remote tracebacks.
    task_always_eager: bool = False
    task_remote_tracebacks: bool = False

    def __post_init__(self, ENV):
        """Populate Celery settings from the ``celery`` config section."""
        super().__post_init__(ENV)

        config = self._CONFIG["celery"]
        celery_broker_db = config.pop("CELERY_BROKER_DB", "3")
        celery_result_broker_db = config.pop("CELERY_RESULT_BACKEND_DB", "4")
        # Base connection "redis://user:pass@host:port/" without the database
        # number. Bug fix: the original used str.strip(str(courseware_db)),
        # which removes *characters* from both ends rather than a suffix and
        # could over-strip the URL for multi-digit database numbers.
        _redis_conn = self.REDIS_URL.rsplit("/", 1)[0] + "/"

        self.task_time_limit = config.pop("task_time_limit", self.task_time_limit)
        self.task_soft_time_limit = config.pop(
            "task_soft_time_limit", self.task_soft_time_limit
        )
        default_broker_transport_options = {"visibility_timeout": 3600 * 24}
        self.broker_transport_options = config.pop(
            "broker_transport_options", default_broker_transport_options
        )

        self.task_acks_late = config.pop("task_acks_late", self.task_acks_late)
        self.task_reject_on_worker_lost = config.pop(
            "task_reject_on_worker_lost", self.task_reject_on_worker_lost
        )
        self.task_acks_on_failure_or_timeout = config.pop(
            "task_acks_on_failure_or_timeout", self.task_acks_on_failure_or_timeout
        )

        self.accept_content = config.pop("accept_content", self.accept_content)
        self.task_serializer = config.pop("task_serializer", self.task_serializer)
        self.result_serializer = config.pop("result_serializer", self.result_serializer)

        self.timezone = config.pop("timezone", self.timezone)

        self.task_always_eager = config.pop("task_always_eager", self.task_always_eager)
        self.task_remote_tracebacks = config.pop(
            "task_remote_tracebacks", self.task_remote_tracebacks
        )

        self.worker_max_tasks_per_child = config.pop(
            "worker_max_tasks_per_child", self.worker_max_tasks_per_child
        )
        self.worker_max_memory_per_child = config.pop(
            "worker_max_memory_per_child", self.worker_max_memory_per_child
        )
        self.worker_prefetch_multiplier = config.pop(
            "worker_prefetch_multiplier", self.worker_prefetch_multiplier
        )
        self.task_compression = config.pop("task_compression", self.task_compression)
        # Bug fix: the original popped "worker_max_tasks_per_child" here (a
        # copy-paste slip), so a configured "result_compression" was ignored.
        self.result_compression = config.pop(
            "result_compression", self.result_compression
        )

        self.broker_url = _redis_conn + celery_broker_db
        self.result_backend = _redis_conn + celery_result_broker_db

        # Default queue / routing key fall back to "<project>_celery".
        task_default_queue = config.pop(
            "CELERY_TASK_DEFAULT_QUEUE", f"{BASE_DIR.name}_celery"
        )
        task_default_routing_key = config.pop(
            "CELERY_TASK_DEFAULT_ROUTING_KEY", f"{BASE_DIR.name}_celery"
        )
        task_queue = {
            Queue(task_default_queue, routing_key=task_default_routing_key),
        }
        # Additional queues declared in the config file (required key).
        for item in config.pop("celery_task_queues"):
            task_queue.add(
                Queue(
                    item["queue"],
                    exchange=Exchange(item.get("exchange", item["queue"])),
                    routing_key=item["routing_key"],
                )
            )
        self.task_queues = task_queue

        # Routing table: keys are literal task paths, or compiled regexes
        # when the entry sets "is_re".
        task_routes = {}
        for item in config.pop("celery_task_routes"):
            _key = (
                re.compile(item["path"]) if item.get("is_re", False) else item["path"]
            )
            route_info = {"queue": item["queue"]}
            if item.get("routing_key"):
                route_info.update({"routing_key": item["routing_key"]})
            if item.get("exchange"):
                route_info.update({"exchange": item["exchange"]})
            if item.get("serializer"):
                route_info.update({"serializer": item["serializer"]})
            task_routes.update({_key: route_info})
        self.task_routes = task_routes

        # Release the raw config dict once everything has been copied over.
        self._CONFIG = None

    def __iter__(self):
        """Iterate over all attribute values of this config instance."""
        return iter(vars(self).values())


# Environment name comes from FLASK_CONFIG (defaults to "development").
_env_name = os.getenv("FLASK_CONFIG", default="development")

# Flask application configuration
flask_config: Config = Config(ENV=_env_name)
# Celery worker configuration
celery_config: CeleryConfig = CeleryConfig(ENV=_env_name)
