import hashlib
import os

from loguru import logger

from mdtool.config_reader import Config
from mdtool.db_migrator import DbMigrator
from mdtool.util import is_valid_file

from .db_Set import DBSet
from .error_handler import (
    ErrorLevel,
    FileSystemError,
    RetryStrategy,
    error_handler,
)


class WxCache:
    """File-content-keyed cache of WeChat media uploads, backed by DBSet.

    Entries are keyed by the MD5 digest of a local file's contents and map to
    the (media_id, media_url) pair returned by the upload, plus a local path
    (or the original remote URL) recorded for traceability.
    """

    def __init__(self, config: Config) -> None:
        """Run pending DB migrations and open the cache store.

        Args:
            config: Application configuration; must provide "db.storefile".

        Raises:
            FileSystemError: if the DB store file cannot be opened.
        """
        self.config = config
        db_store_file = str(self.config.get("db.storefile"))
        # Apply any pending schema migrations before opening the store;
        # the migration result tuple is intentionally discarded.
        DbMigrator(self.config).migrate()
        self.db_set = DBSet(db_store_file)
        # Defensive guard: a constructor should never yield None, but the
        # original code treats an unopened store as a fatal error.
        if self.db_set is None:
            raise FileSystemError(f"Failed to open DB_STORE_FILE: {db_store_file}")
        logger.debug(f"DB_STORE_FILE: {db_store_file}")

    # Stores the media_id and media_url of an uploaded image; the key is
    # the MD5 digest of the file referenced by url_in_text.
    @error_handler.retry(max_retries=3, strategy=RetryStrategy.LINEAR_BACKOFF)
    def set(self, url_in_text: str, media_id: str, media_url: str):
        """Cache the upload result for the image referenced by *url_in_text*.

        The local_url column stores the path relative to the article
        directory, or the URL verbatim when *url_in_text* is remote.
        NOTE(review): the digest is computed by treating url_in_text as a
        local file path; a remote "http..." value presumably never reaches
        here un-downloaded — confirm against callers.
        """
        md5 = self.__file_digest(url_in_text)
        if url_in_text.startswith("http"):
            # Remote addresses are stored as-is.
            relpath = url_in_text
        else:
            # Local files are stored relative to the article directory.
            article_dir = str(self.config.get("articledir"))
            relpath = os.path.relpath(url_in_text, article_dir)
        if self.db_set is not None:
            self.db_set.set(md5, media_id, media_url, local_url=relpath)

    def get(self, file_path: str) -> list:
        """Return the cached row for *file_path*, or [] when absent."""
        md5 = self.__file_digest(file_path)
        return self.db_set.get(md5) if self.db_set is not None else []

    # Updates the media_id of an uploaded article without storing a
    # media_url; the key is the MD5 digest of the article file.
    def update(
        self, file_path: str, media_id: str, media_url: str = "", local_url: str = ""
    ) -> None:
        """Create or overwrite the cache entry for *file_path*."""
        md5 = self.__file_digest(file_path)
        if self.db_set is not None:
            self.db_set.set(md5, media_id, media_url, local_url)

    def __file_digest(self, file_path: str) -> str:
        """Return the hex MD5 digest of the file's contents.

        Raises:
            FileSystemError: if *file_path* is not a valid file.
        """
        try:
            if not is_valid_file(file_path):
                raise FileSystemError(f"File does not exist: {file_path}")
            md5 = hashlib.md5()
            # Hash incrementally so large files are never fully resident.
            with open(file_path, "rb") as f:
                while chunk := f.read(1 << 20):
                    md5.update(chunk)
            return md5.hexdigest()
        except Exception as e:
            error_handler.handle_error(e, {"file": file_path}, level=ErrorLevel.ERROR)
            raise

    def is_cached(self, file_path: str) -> bool:
        """Return True when a cache entry exists for *file_path*.

        Digest failures are reported via error_handler and treated as
        "not cached" rather than propagated.
        """
        try:
            return bool(self.get(file_path))
        except Exception as e:
            error_handler.handle_error(e, {"file": file_path}, level=ErrorLevel.ERROR)
            return False

    def find_cached_imgs_with_duplication(self) -> dict[str, list]:
        """Scan the downloaded-assets directory for images already present
        in the cache, grouped by content digest.

        Returns:
            Mapping of MD5 digest -> list of file paths sharing that digest
            (a list longer than one means duplicate copies on disk).

        Raises:
            FileSystemError: if the DB store is unavailable.
        """
        assets_dir = str(self.config.get("web_downloaded_dir"))
        imgs = []
        for root, _, files in os.walk(assets_dir):
            for file in files:
                if file.lower().endswith((".png", ".jpeg", ".jpg", ".gif")):
                    imgs.append(os.path.join(root, file))
        if self.db_set is None:
            raise FileSystemError("db_set is None")
        keys = self.db_set.all_md5s_in_cache()
        cached_imgs: dict[str, list] = {}
        for img_path in imgs:
            md5 = self.__file_digest(img_path)
            if md5 in keys:
                # Group every on-disk copy under its content digest.
                cached_imgs.setdefault(md5, []).append(img_path)
        return cached_imgs

    def __print_out(self, output):
        """Print each collected report line to stdout."""
        for row in output:
            print(row)

    def remove_duplication(self, cached_imgs):
        """Delete all but the first file in each duplicate group and print
        a report of what was removed.

        Args:
            cached_imgs: Mapping of digest -> list of file paths, as
                returned by find_cached_imgs_with_duplication().
        """
        output = []
        for key, filepath_list in cached_imgs.items():
            # Keep the first copy; everything after it is redundant.
            filepaths = filepath_list[1:]
            if len(filepaths) == 0:
                continue
            output.append(f"key: {key} 有重复的图片，共有{len(filepaths)}个")
            for filepath in filepaths:
                os.remove(filepath)
                output.append(f"\t已删除文件：{filepath}")
            output.append("已删除完毕。")
        self.__print_out(output)

    def close(self):
        """Close the database connection"""
        if self.db_set is not None:
            self.db_set.close()  # Close DBCACHE connection
