import os
import json
import logging
import hashlib
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
import os
import logging
import gzip
from common.util.exec_utils import exec_cp_cmd
LOGGER = logging.getLogger(__name__)

############################ Data deduplication ###########################
############################################################################
class Deduplicator:
    """Data deduplication for backups.

    Files with identical content (keyed by SHA-256 digest) are stored once;
    later copies become hard links to the first backed-up instance.  The
    digest -> backup-path mapping is persisted as JSON between runs.
    """

    def __init__(self, hash_table_path="backup_hash_table.json"):
        self._hash_table_path = hash_table_path
        self._hash_table = self._load_hash_table()

    def _load_hash_table(self):
        """Load the hash table saved by the previous backup; {} if missing or corrupt."""
        if os.path.exists(self._hash_table_path):
            try:
                with open(self._hash_table_path, "r") as f:
                    return json.load(f)
            # ValueError covers json.JSONDecodeError
            except (OSError, ValueError) as e:
                LOGGER.error(f"Failed to load hash table: {str(e)}")
        return {}

    def _save_hash_table(self):
        """Persist the current hash table to disk (best effort, errors are logged)."""
        try:
            with open(self._hash_table_path, "w") as f:
                json.dump(self._hash_table, f)
        except (OSError, TypeError) as e:
            LOGGER.error(f"Failed to save hash table: {str(e)}")

    def _calculate_file_hash(self, file_path):
        """Return the SHA-256 hex digest of *file_path*, or None on I/O error."""
        sha256 = hashlib.sha256()
        try:
            with open(file_path, "rb") as f:
                while chunk := f.read(8192):
                    sha256.update(chunk)
            return sha256.hexdigest()
        except OSError as e:
            LOGGER.error(f"Failed to calculate hash for {file_path}: {str(e)}")
            return None

    def process(self, source, target, user_name=None):
        """Deduplicate *source* into *target*.

        Content already seen in an earlier backup is hard-linked to the
        existing copy; new content is copied via ``exec_cp_cmd`` and recorded
        in the hash table.  Returns True on success, False otherwise.
        """
        file_hash = self._calculate_file_hash(source)
        if not file_hash:
            return False

        target_dir = os.path.dirname(target)

        if file_hash in self._hash_table:
            # Content already backed up: hard-link instead of copying again.
            existing_file = self._hash_table[file_hash]
            try:
                # BUGFIX: this branch never created the target directory, so
                # os.link failed whenever the link was the first entry there.
                if target_dir:
                    os.makedirs(target_dir, exist_ok=True)
                os.link(existing_file, target)
                LOGGER.info(f"Created hard link for {source} -> {target}")
                return True
            except OSError as e:
                # e.g. cross-device link, or the original backup file was removed
                LOGGER.error(f"Failed to create hard link for {source}: {str(e)}")
                return False

        # First time we see this content: perform a real copy.
        try:
            if target_dir:
                os.makedirs(target_dir, exist_ok=True)
            ret = exec_cp_cmd(source, target, user_name, "-r",
                              is_check_white_list=False)
            if not ret:
                # BUGFIX: the old message used self._job_id, which this class
                # never defines; the AttributeError masked the real cp failure.
                LOGGER.error(f"Failed to exec cp cmd, source: {source}.")
                return False
            # Remember where this content lives for future dedup runs.
            self._hash_table[file_hash] = target
            LOGGER.info(f"Backed up {source} -> {target}")
            return True
        except Exception as e:
            LOGGER.error(f"Failed to back up {source}: {str(e)}")
            return False

    def finalize(self):
        """Flush the hash table so the next backup run can reuse it."""
        self._save_hash_table()

############################ Data compression #############################
############################################################################
class Compressor:
    """Gzip compression stage of the backup pipeline."""

    def process(self, source, target):
        """Compress *source* into gzip file *target*, streaming fixed-size
        chunks so arbitrarily large files never load fully into memory.

        Returns True on success, False on any error (which is logged).
        """
        try:
            with open(source, "rb") as fin, gzip.open(target, "wb") as fout:
                # 8 KiB chunks; tune if profiling ever shows I/O as a bottleneck.
                while block := fin.read(8192):
                    fout.write(block)
            LOGGER.info(f"Compressed {source} -> {target}")
            return True
        except Exception as e:
            LOGGER.error(f"Failed to compress {source}: {str(e)}")
            return False


############################# Data encryption #############################
############################################################################
class Encryptor:
    """AES-256-CBC file encryption with PKCS#7 padding.

    Output layout: 16-byte IV followed by the ciphertext, so a decryptor can
    recover the IV from the file itself.
    """

    def __init__(self, key):
        # AES-256 requires exactly 32 key bytes.  BUGFIX: the old code only
        # truncated, so any key shorter than 32 bytes made AES.new() raise and
        # every process() call fail.  NUL-padding keeps behavior byte-identical
        # for keys of 32+ bytes while making short keys usable.
        # NOTE(review): a real KDF (PBKDF2/scrypt) would be preferable, but
        # would break decryption of data already encrypted with long keys.
        self._key = key.encode("utf-8")[:32].ljust(32, b"\0")

    def process(self, source, target):
        """Encrypt *source* into *target*; True on success, False on error."""
        try:
            cipher = AES.new(self._key, AES.MODE_CBC)
            with open(source, "rb") as src, open(target, "wb") as dst:
                dst.write(cipher.iv)  # IV first, needed for decryption
                remainder = b""
                while chunk := src.read(8192):
                    data = remainder + chunk
                    # Encrypt only whole blocks mid-stream; carry the rest.
                    cut = len(data) - (len(data) % AES.block_size)
                    if cut:
                        dst.write(cipher.encrypt(data[:cut]))
                    remainder = data[cut:]
                # BUGFIX: PKCS#7 requires the final block to ALWAYS be padded,
                # even when the plaintext length is a multiple of the block
                # size.  The old code skipped padding in that case (and could
                # pad mid-stream on a short read), making unpad() ambiguous or
                # corrupting the stream.  pad(b"", 16) yields one full block,
                # so empty files round-trip correctly too.
                dst.write(cipher.encrypt(pad(remainder, AES.block_size)))
            LOGGER.info(f"Encrypted {source} -> {target}")
            return True
        except Exception as e:
            LOGGER.error(f"Failed to encrypt {source}: {str(e)}")
            return False

########################### Main backup routine ###########################
############################################################################
class BackupManager:
    """Orchestrates the backup pipeline: dedup -> gzip -> AES encryption."""

    def __init__(self, job_id, os_user_name, encryption_key):
        self._job_id = job_id
        self._os_user_name = os_user_name
        self._deduplicator = Deduplicator()
        self._compressor = Compressor()
        self._encryptor = Encryptor(encryption_key)

    def _backup_directory(self, source, target, user_name=None):
        """Recursively back up *source* into *target*; False on first failure."""
        for current_dir, _subdirs, file_names in os.walk(source):
            # Mirror the directory layout under the target root.
            rel_path = os.path.relpath(current_dir, source)
            dest_dir = os.path.join(target, rel_path)
            os.makedirs(dest_dir, exist_ok=True)

            for name in file_names:
                src_path = os.path.join(current_dir, name)
                dst_path = os.path.join(dest_dir, name)
                gz_path = dst_path + ".gz"
                # Run the stages in order; `and` short-circuits so a failing
                # stage aborts the whole backup immediately.
                ok = (
                    self._deduplicator.process(src_path, dst_path, user_name=user_name)
                    and self._compressor.process(dst_path, gz_path)
                    and self._encryptor.process(gz_path, gz_path + ".enc")
                )
                if not ok:
                    return False
        return True

    def backup_files(self, source, target, user_name=None):
        """Back up *source* into *target* with dedup, compression and
        encryption applied to every file; returns True on success."""
        if not source or not target:
            LOGGER.error(f"Param error, job id: {self._job_id}.")
            return False

        if not os.path.exists(source):
            LOGGER.error(f"Source directory does not exist: {source}")
            return False

        if not self._backup_directory(source, target, user_name=user_name):
            LOGGER.error(f"Failed to back up directory: {source}")
            return False

        # Persist the dedup hash table so the next run can reuse it.
        self._deduplicator.finalize()
        LOGGER.info(f"Backup completed successfully, job id: {self._job_id}.")
        return True
