import atexit
import contextvars
import logging
import logging.handlers
import os
import pathlib
import queue
import sys
import threading
import time
import traceback
from dataclasses import dataclass
from typing import Any, List

# Project root: the process's current working directory, resolved to an
# absolute path at import time.
BASE_DIR = pathlib.Path.cwd().resolve()

from config import (
    SNAIL_GROUP_NAME,
    SNAIL_HOST_IP,
    SNAIL_HOST_PORT,
    SNAIL_LOG_BUFFER_SIZE,
    SNAIL_LOG_FORMAT,
    SNAIL_LOG_INTERVAL,
    SNAIL_LOG_LEVEL,
    SNAIL_LOG_LOCAL_PATH,
    SNAIL_LOG_LOCAL_RETATION,
    SNAIL_NAMESPACE,
)
from schemas import JobLogTask, JobLogTaskDTO, TaskLogFieldDTO


@dataclass
class SnailLogContext:
    """Per-task logging context carried via SnailLog's ContextVar.

    Holds the identifiers of the task currently being executed so that
    remotely reported log records can be tagged with them.
    Field names are camelCase to match the SnailJob server payload.
    """

    jobId: int  # id of the job being executed
    taskId: int  # id of the concrete task within the job
    taskBatchId: int  # id of the batch this task belongs to


class SnailHttpHandler(logging.Handler):
    """基于时间滑动窗口、队列缓存的日期处理器，用于远程上报日志"""

    @staticmethod
    def formatExcInfo(exc_info: Any):
        if (
            isinstance(exc_info, tuple)
            and len(exc_info) == 3
            and isinstance(exc_info[1], Exception)
        ):
            errors = traceback.format_exception(exc_info[1])
            # 删除当前函数(execute_wrapper)的调用栈
            errors.pop(1)
            return "\n".join(errors)

    # 日志格式转换规则
    RECORD_MAPPINGS = (
        ("time_stamp", lambda r: str(int(r.created * 1000))),
        ("level", lambda r: r.levelname),
        ("thread", lambda r: r.threadName),
        ("message", lambda r: r.msg),
        ("location", lambda r: f"{r.module}:{r.funcName}:{r.lineno}"),
        ("throwable", lambda r: SnailHttpHandler.formatExcInfo(r.exc_info)),
    )

    def __init__(self, capacity=2, interval=10):
        super().__init__()
        self.capacity = capacity
        self.interval = interval
        self.buffer = queue.Queue(capacity)
        self.lock = threading.Lock()
        self.timer = None
        self.start_timer()

    def _transform(self, record: logging.LogRecord) -> JobLogTask:
        """转换日志结构

        Args:
            record (logging.LogRecord): logging标准日志结构

        Returns:
            JobLogTask: SnailJob 服务器日志格式
        """

        field_list: List[TaskLogFieldDTO] = []
        for key, mapper in SnailHttpHandler.RECORD_MAPPINGS:
            assert callable(mapper), "Mapper is not callable"
            field_list.append(TaskLogFieldDTO(name=key, value=mapper(record)))
        field_list.append(TaskLogFieldDTO(name="host", value=SNAIL_HOST_IP))
        field_list.append(TaskLogFieldDTO(name="port", value=SNAIL_HOST_PORT))

        log_context = SnailLog.get_context()
        job_log_task = JobLogTask(
            logType="JOB",
            namespaceId=SNAIL_NAMESPACE,
            groupName=SNAIL_GROUP_NAME,
            realTime=int(time.time() * 1000),
            fieldList=field_list,
            jobId=log_context.jobId,
            taskBatchId=log_context.taskBatchId,
            taskId=log_context.taskId,
        )

        return job_log_task

    def emit(self, record: logging.LogRecord):
        # 1. 转化日志元素
        dto = self._transform(record)

        # 2. 如果当前缓冲区为空，启动计时器
        if self.buffer.empty():
            self.start_timer()

        # 3. 将日志存放到缓冲区
        self.buffer.put(dto)

        # 4. 如果缓冲区满，则冲洗
        if self.buffer.full():
            self.flush()

    def flush(self):
        """冲洗缓冲区，并发送到远程服务器"""

        items: List[TaskLogFieldDTO] = []
        while not self.buffer.empty():
            items.append(self.buffer.get())

        if items:
            self._send(items)

    def _send(self, items: List[TaskLogFieldDTO]):
        """推送日志到远程服务器

        Args:
            items (List[TaskLogFieldDTO]): 日志元素
        """
        # 延迟import，解决循环import
        from rpc import send_batch_log_report

        job_log_task_dto = JobLogTaskDTO(reqId=time.time_ns(), args=[items])
        send_batch_log_report(job_log_task_dto)

    def start_timer(self):
        """启动时间滑动窗口定时器"""

        if self.timer:
            self.timer.cancel()
        self.timer = threading.Timer(self.interval, self.flush)
        self.timer.start()

    def close(self):
        if self.timer:
            self.timer.cancel()
        self.flush()
        super().close()


class SnailLog:
    """Facade over the local and remote SnailJob loggers."""

    _context = contextvars.ContextVar("SnailLog_context")

    LOCAL = logging.getLogger("SnailJob Local Logger")
    REMOTE = logging.getLogger("SnailJob Remote Logger")

    @staticmethod
    def set_context(data: "SnailLogContext"):
        """Bind *data* as the task context of the current thread/task."""
        SnailLog._context.set(data)

    @staticmethod
    def get_context() -> "SnailLogContext":
        """Return the context previously bound with ``set_context``."""
        return SnailLog._context.get()

    @staticmethod
    def config_loggers():
        """Wire file/console handlers to both loggers, plus HTTP to REMOTE."""
        # One shared formatter for the textual (file/console) handlers.
        formatter = logging.Formatter(SNAIL_LOG_FORMAT)

        rotating_file = logging.handlers.TimedRotatingFileHandler(
            filename=BASE_DIR / SNAIL_LOG_LOCAL_PATH,
            when="D",
            backupCount=SNAIL_LOG_LOCAL_RETATION,
        )
        console = logging.StreamHandler(sys.stdout)
        for text_handler in (rotating_file, console):
            text_handler.setFormatter(formatter)

        remote_reporter = SnailHttpHandler(
            capacity=SNAIL_LOG_BUFFER_SIZE,
            interval=SNAIL_LOG_INTERVAL,
        )
        # Flush buffered records when the interpreter shuts down.
        atexit.register(remote_reporter.close)

        wiring = (
            (SnailLog.REMOTE, (rotating_file, remote_reporter, console)),
            (SnailLog.LOCAL, (rotating_file, console)),
        )
        for logger, handlers in wiring:
            logger.setLevel(SNAIL_LOG_LEVEL)
            # Detach from the root logger so records are not duplicated.
            logger.parent = None
            for handler in handlers:
                logger.addHandler(handler)


# Configure the loggers at import time so that importing this module is
# enough to have LOCAL/REMOTE fully wired.
SnailLog.config_loggers()


# NOTE: to run this demo without a live server, comment out the _send call
# used by the emit path first.
if __name__ == "__main__":

    def log_messages(thread_id):
        """Bind a per-thread context, then emit ten remote log lines."""
        SnailLog.set_context(
            SnailLogContext(
                taskId=thread_id,
                jobId=thread_id,
                taskBatchId=thread_id,
            )
        )

        for seq in range(10):
            SnailLog.REMOTE.info(f"Thread {thread_id} - log message {seq}")
            time.sleep(0.5)

    SnailLog.LOCAL.info("hello")

    # Spin up several logging threads.
    workers = [
        threading.Thread(target=log_messages, args=(worker_id,))
        for worker_id in range(6)
    ]
    for worker in workers:
        worker.start()

    # Wait for all of them to finish.
    for worker in workers:
        worker.join()
