import hashlib
import os
import shutil
import threading
import time
from pathlib import Path

from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
from app.core.config import settings
from app.core.event import EventManager
from app.db.archive_oper import ArchiveOper
from app.log import logger
from app.schemas import EventType
from app.utils.singleton import Singleton
import imghdr


def is_image(file_path):
    """Return True if the file starts with a known image magic number.

    Replaces the previous ``imghdr.what`` call: ``imghdr`` is deprecated
    (PEP 594) and removed in Python 3.13. Recognizes the formats relevant
    for comic-archive pages: JPEG, PNG, GIF, BMP, WebP and TIFF.

    :param file_path: path to the file to probe
    :return: True when the leading bytes match an image signature,
             False otherwise (including unreadable/missing files)
    """
    try:
        with open(file_path, "rb") as f:
            header = f.read(12)
    except OSError:
        # Unreadable or vanished file: treat as "not an image"
        return False
    if header.startswith(b"\xff\xd8\xff"):          # JPEG
        return True
    if header.startswith(b"\x89PNG\r\n\x1a\n"):     # PNG
        return True
    if header[:6] in (b"GIF87a", b"GIF89a"):        # GIF
        return True
    if header.startswith(b"BM"):                    # BMP
        return True
    if header[:4] == b"RIFF" and header[8:12] == b"WEBP":  # WebP
        return True
    if header[:4] in (b"II*\x00", b"MM\x00*"):      # TIFF
        return True
    return False


class ArchiveMonitorHandler(FileSystemEventHandler):
    """Watchdog handler that re-parses an archive file after it is modified.

    Combines a coarse rate limit (``__timeout``) with a debounce timer
    (``__debounce_interval``) so a burst of filesystem events for the same
    save operation triggers only one reload.
    """

    # Pending debounce timer (cancelled and replaced on each new event)
    __reload_timer = None
    # Debounce delay in seconds before the reload actually fires
    __debounce_interval = 0.5
    # Timestamp of the last accepted modification event
    __last_modified = 0
    # Minimum interval in seconds between accepted events (rate limit)
    __timeout = 2

    def on_modified(self, event):
        """Schedule a debounced archive reload for a modified file.

        :param event: watchdog filesystem event; directory events are ignored
        """
        if event.is_directory:
            return
        file_path = Path(event.src_path)

        # Coarse rate limit: drop events arriving within __timeout seconds
        current_time = time.time()
        if current_time - self.__last_modified < self.__timeout:
            return
        self.__last_modified = current_time
        try:
            # Debounce: restart the timer so only the last event in a burst fires
            if self.__reload_timer:
                self.__reload_timer.cancel()
            self.__reload_timer = threading.Timer(self.__debounce_interval, self.__reload_archive, [file_path])
            self.__reload_timer.start()
        except Exception as e:
            logger.error(f"档案文件修改后重载出错：{str(e)}")

    @staticmethod
    def __reload_archive(file_path):
        """Re-parse the modified archive (runs on the debounce timer thread).

        Exceptions are caught and logged here because a Timer thread has no
        caller to propagate them to — an unhandled error would be lost.
        """
        try:
            logger.info(f"档案 {file_path} 修改，重新解析...")
            ArchiveMonitor().reload_archive(file_path)
        except Exception as e:
            logger.error(f"档案修改后解析出错：{str(e)}")


def calculate_file_md5(file_path):
    """Return the MD5 hex digest of a file.

    The file is streamed in 4 KiB chunks so arbitrarily large archives
    can be hashed without loading them fully into memory.

    :param file_path: path of the file to hash
    :return: 32-character lowercase hexadecimal MD5 digest
    """
    digest = hashlib.md5()
    with open(file_path, 'rb') as stream:
        while chunk := stream.read(4096):
            digest.update(chunk)
    return digest.hexdigest()


# Maps archive file extensions to the format name expected by
# shutil.unpack_archive; extensions absent from this table fall back to
# shutil's own extension-based auto-detection (format=None).
_UNPACK_FORMATS = {
    "cbz": "zip"
}


class ArchiveMonitor(metaclass=Singleton):
    """
    档案监听器 — archive file monitor (singleton).

    Watches ``settings.ARCHIVE_PATH`` for file modifications; on change,
    unpacks the archive into an MD5-keyed cache directory, counts its
    image pages, records it in the database and emits an ArchiveAdded
    event.
    """

    # watchdog observer; created and started in __init__
    _observer: Observer = None

    def __init__(self):
        self.archive_oper = ArchiveOper()
        self.eventmanager = EventManager()
        # Begin watching the archive directory for modifications
        self.__start_monitor()

    def __start_monitor(self):
        """Start the recursive watchdog observer on the archive directory."""
        logger.info(f"开始监听档案文件...{settings.ARCHIVE_PATH}")
        monitor_handler = ArchiveMonitorHandler()
        self._observer = Observer()
        self._observer.schedule(monitor_handler, settings.ARCHIVE_PATH, recursive=True)
        self._observer.start()

    def stop_monitor(self):
        """Stop the watchdog observer and wait for its thread to finish."""
        if self._observer:
            logger.info("正在停止监听档案文件...")
            self._observer.stop()
            self._observer.join()
            logger.info("停止档案文件修改监测成功")

    def reload_archive(self, file_path):
        """
        Parse an archive file and add it to the database.

        Computes the file's MD5, unpacks it into
        ``ARCHIVE_CACHE_PATH/<md5[:2]>/<md5>``, counts the image pages,
        inserts a database record and emits an ArchiveAdded event.
        Unsupported extensions and already-cached archives are skipped.

        :param file_path: absolute path of the archive file
        """
        # splitext is safe for names with multiple dots (e.g. "a.b.cbz");
        # the previous two-way `.split(".")` unpacking crashed on them.
        file_name, dot_ext = os.path.splitext(os.path.basename(file_path))
        file_ext = dot_ext.lstrip(".")

        if file_ext not in settings.SUPPORTED_ARCHIVE_EXT:
            logger.debug(f"不支持的文件格式: {file_path}")
            return

        md5 = calculate_file_md5(file_path)
        logger.debug(f"md5: {md5}")
        # Cache directory keyed by the first two hex chars, then the full md5
        unpack_path = os.path.join(settings.ARCHIVE_CACHE_PATH, md5[0:2], md5)
        if os.path.exists(unpack_path):
            logger.debug("档案缓存已存在")
            return

        logger.info(f"开始解压: {unpack_path}")
        # .get() already yields None for unknown extensions, letting
        # shutil.unpack_archive auto-detect the format
        shutil.unpack_archive(file_path, unpack_path, _UNPACK_FORMATS.get(file_ext))
        logger.info("解压成功")

        # Count image files among everything that was unpacked
        pages = sum(
            1
            for dir_path, _dir_names, names in os.walk(unpack_path)
            for name in names
            if is_image(os.path.join(dir_path, name))
        )

        relative_path = os.path.relpath(file_path, settings.ARCHIVE_PATH)
        file_size = os.path.getsize(file_path)
        # Persist the archive record
        logger.debug("插入数据库")
        archive_id = self.archive_oper.add(title=file_name, title_display=file_name, md5=md5,
                                           file_path=str(relative_path),
                                           file_name=file_name, file_size=file_size, unpack_path=unpack_path,
                                           pages=pages)
        # Notify listeners that a new archive is available
        logger.debug("发送新增档案事件")
        self.eventmanager.send_event(
            EventType.ArchiveAdded,
            {
                "type": "archive",
                "id": archive_id,
                "title": file_name,
                "md5": md5,
                "file_path": str(file_path),
                "file_name": file_name,
                "file_size": file_size,
                "unpack_path": unpack_path,
                "pages": pages,
            }
        )
