import os
import json
import hashlib
import shutil
from pathlib import Path
from datetime import datetime
from typing import Dict, List, Optional, Union

from minio import Minio
from minio.error import S3Error
from dotenv import load_dotenv


class HashStorage:
    """Deduplicating network storage backed by MinIO.

    File content is stored once per unique SHA-256 hash (object name
    ``<hash>.bin``), so identical content is uploaded only once.  A
    ``meta.json`` object in the same bucket maps logical filenames to
    hashes (``file_to_hash``) and back (``hash_to_files``), providing a
    virtual file tree with "/"-separated paths.  Downloads are served
    through a local cache directory under ``~/.cache/hash_storage``.
    """

    def __init__(self, env_file: Optional[str] = None, access_key: Optional[str] = None,
                 secret_key: Optional[str] = None, url: Optional[str] = None,
                 bucket: Optional[str] = None):
        """Initialize the MinIO client and ensure the bucket exists.

        Args:
            env_file: Path to a .env file to load when credentials are not
                all passed directly; defaults to ``.env`` in the current
                directory.
            access_key: MinIO access key; takes precedence over the environment.
            secret_key: MinIO secret key; takes precedence over the environment.
            url: MinIO server URL (``http://`` or ``https://`` prefix).
            bucket: MinIO bucket name.

        Raises:
            ValueError: If any of the four settings is still missing.
            RuntimeError: If the bucket can neither be found nor created.
        """
        if access_key and secret_key and url and bucket:
            # All four supplied explicitly: skip reading the environment.
            self.access_key = access_key
            self.secret_key = secret_key
            self.url = url
            self.bucket = bucket
        else:
            # Fill the gaps from a .env file / the process environment.
            if env_file:
                load_dotenv(env_file)
            else:
                load_dotenv()

            self.access_key = access_key or os.getenv('ACCESS_KEY')
            self.secret_key = secret_key or os.getenv('SECRET_KEY')
            self.url = url or os.getenv('URL')
            self.bucket = bucket or os.getenv('BUCKET')

        if not all([self.access_key, self.secret_key, self.url, self.bucket]):
            raise ValueError("缺少必要的MinIO配置：ACCESS_KEY, SECRET_KEY, URL, BUCKET")

        # Minio() wants a bare host[:port] endpoint; derive TLS from the scheme.
        self.client = Minio(
            self.url.replace('http://', '').replace('https://', ''),
            access_key=self.access_key,
            secret_key=self.secret_key,
            secure=self.url.startswith('https://')
        )

        self._ensure_bucket_exists()

        # Local download cache, keyed by content hash.
        self.cache_dir = Path.home() / '.cache' / 'hash_storage'
        self.cache_dir.mkdir(parents=True, exist_ok=True)

        # Name of the metadata object inside the bucket.
        self.meta_file = 'meta.json'

    def _ensure_bucket_exists(self):
        """Create the configured bucket if it does not exist yet.

        Raises:
            RuntimeError: If the bucket can neither be accessed nor created.
        """
        try:
            if not self.client.bucket_exists(self.bucket):
                self.client.make_bucket(self.bucket)
        except S3Error as e:
            # Chain the original S3 error for easier diagnosis.
            raise RuntimeError(f"无法创建或访问bucket {self.bucket}: {e}") from e

    def _calculate_hash(self, file_path: Union[str, Path]) -> str:
        """Return the SHA-256 hex digest of a file.

        Args:
            file_path: Path of the file to hash.

        Returns:
            The file's SHA-256 digest as a lowercase hex string.
        """
        sha256_hash = hashlib.sha256()
        with open(file_path, "rb") as f:
            # Read in 64 KiB chunks so memory stays flat for large files.
            for chunk in iter(lambda: f.read(65536), b""):
                sha256_hash.update(chunk)
        return sha256_hash.hexdigest()

    def _get_meta_data(self) -> Dict:
        """Fetch and parse ``meta.json`` from the bucket.

        Returns:
            The metadata dict, or a fresh empty structure when the object
            cannot be read (e.g. it does not exist yet).
        """
        try:
            response = self.client.get_object(self.bucket, self.meta_file)
            try:
                return json.loads(response.read().decode('utf-8'))
            finally:
                # Always release the HTTP connection, even if parsing fails.
                response.close()
                response.release_conn()
        except S3Error:
            # NOTE(review): *any* S3 error (not only "no such key") falls
            # through to an empty meta; a subsequent upload would then
            # overwrite existing metadata. Kept as-is to preserve behavior.
            return {
                "version": "1.0",
                "hash_to_files": {},   # hash -> list of filenames sharing it
                "file_to_hash": {},    # filename -> hash
                "last_updated": datetime.now().isoformat()
            }

    def _upload_meta_data(self, meta_data: Dict):
        """Serialize *meta_data* and overwrite ``meta.json`` in the bucket.

        Args:
            meta_data: Metadata dict; its ``last_updated`` field is refreshed.
        """
        meta_data["last_updated"] = datetime.now().isoformat()
        # Encode exactly once so the stream and declared length always agree.
        payload = json.dumps(meta_data, indent=2, ensure_ascii=False).encode('utf-8')

        from io import BytesIO
        self.client.put_object(
            self.bucket,
            self.meta_file,
            BytesIO(payload),
            len(payload),
            content_type='application/json'
        )

    def _upload_file_content(self, file_path: Union[str, Path], file_hash: str):
        """Upload the file's bytes as ``<hash>.bin`` unless already stored.

        Args:
            file_path: Local file whose content to upload.
            file_hash: SHA-256 digest of that content.
        """
        object_name = f"{file_hash}.bin"

        try:
            self.client.stat_object(self.bucket, object_name)
            return  # Content already present: dedup hit, nothing to upload.
        except S3Error:
            pass  # Object not found; fall through to the upload.

        self.client.fput_object(self.bucket, object_name, str(file_path))

    def upload(self, path: Union[str, Path], filename: Optional[str] = None) -> bool:
        """Upload a file or a whole directory tree.

        Args:
            path: File or directory to upload.
            filename: Custom logical name (for a directory this renames the
                top-level virtual directory).

        Returns:
            True on success, False on any failure (which is printed).

        Raises:
            FileNotFoundError: If *path* does not exist.
        """
        path = Path(path)

        if not path.exists():
            raise FileNotFoundError(f"路径不存在: {path}")

        try:
            meta_data = self._get_meta_data()

            if path.is_file():
                self._upload_single_file(path, filename, meta_data)
            elif path.is_dir():
                self._upload_directory(path, filename, meta_data)
            else:
                raise ValueError(f"不支持的路径类型: {path}")

            # Persist the metadata only after all content uploads succeeded.
            self._upload_meta_data(meta_data)
            return True

        except Exception as e:
            print(f"上传失败: {e}")
            return False

    def _upload_single_file(self, file_path: Path, custom_filename: Optional[str], meta_data: Dict):
        """Upload one file and record it in the metadata.

        Args:
            file_path: Local file to upload.
            custom_filename: Logical name to store it under; defaults to the
                file's own name.
            meta_data: Metadata dict, updated in place (caller persists it).
        """
        file_hash = self._calculate_hash(file_path)
        filename = custom_filename or file_path.name

        # Same name re-uploaded with different content: detach the name
        # from its previous hash first.
        if filename in meta_data["file_to_hash"]:
            old_hash = meta_data["file_to_hash"][filename]
            if old_hash != file_hash:
                if old_hash in meta_data["hash_to_files"]:
                    if filename in meta_data["hash_to_files"][old_hash]:
                        meta_data["hash_to_files"][old_hash].remove(filename)

        # First time this content is seen: upload the bytes.
        if file_hash not in meta_data["hash_to_files"]:
            meta_data["hash_to_files"][file_hash] = []
            self._upload_file_content(file_path, file_hash)

        if filename not in meta_data["hash_to_files"][file_hash]:
            meta_data["hash_to_files"][file_hash].append(filename)

        meta_data["file_to_hash"][filename] = file_hash
        # FIX: report the actual filename, and slice the hash — the old
        # ``{file_hash:8}`` was a min-width pad, not a truncation.
        print(f"append filename {filename} to {file_hash[:8]}...")

    def _upload_directory(self, dir_path: Path, custom_dirname: Optional[str], meta_data: Dict):
        """Recursively upload every regular file under *dir_path*.

        Args:
            dir_path: Directory to walk.
            custom_dirname: Name of the top-level virtual directory;
                defaults to the directory's own name.
            meta_data: Metadata dict, updated in place.
        """
        custom_dirname = custom_dirname or dir_path.name
        for file_path in dir_path.rglob('*'):
            if file_path.is_file():
                # Store keys as "/"-separated paths relative to the root.
                relative_path = file_path.relative_to(dir_path)
                filename = custom_dirname + "/" + str(relative_path).replace('\\', '/')
                self._upload_single_file(file_path, filename, meta_data)

    def download(self, filename: str, output_path: Union[str, Path]) -> bool:
        """Download a file, or a whole virtual directory, to *output_path*.

        Args:
            filename: Logical file name, or virtual directory name.
            output_path: Target file path (single file) or target directory
                (directory download).

        Returns:
            True on success, False otherwise (failures are printed).
        """
        try:
            meta_data = self._get_meta_data()

            # Exact match: single file.
            if filename in meta_data["file_to_hash"]:
                return self._download_single_file(filename, output_path, meta_data)

            # Otherwise treat it as a directory: match "filename/..." keys.
            dir_prefix = filename.rstrip('/') + '/'
            matching_files = [f for f in meta_data["file_to_hash"] if f.startswith(dir_prefix)]

            if not matching_files:
                # FIX: show the requested name instead of a placeholder.
                print(f"文件或目录不存在: {filename}, dir_prefix={dir_prefix}")
                print(f"  可用文件: {meta_data['file_to_hash'].keys()}")
                return False

            return self._download_directory(matching_files, filename, output_path, meta_data)

        except Exception as e:
            print(f"下载失败: {e}")
            return False

    def _download_single_file(self, filename: str, output_path: Union[str, Path], meta_data: Dict) -> bool:
        """Download one file, going through the local hash cache.

        Args:
            filename: Logical file name (must exist in *meta_data*).
            output_path: Destination file path; parents are created.
            meta_data: Metadata dict.

        Returns:
            True (errors propagate to the caller).
        """
        file_hash = meta_data["file_to_hash"][filename]
        output_path = Path(output_path)

        output_path.parent.mkdir(parents=True, exist_ok=True)

        cache_file = self.cache_dir / f"{file_hash}.bin"

        if not cache_file.exists():
            # Populate the cache from MinIO first; then always serve copies
            # from the cache so repeated downloads skip the network.
            self.client.fget_object(self.bucket, f"{file_hash}.bin", str(cache_file))

        shutil.copy2(cache_file, output_path)
        return True

    def _download_directory(self, matching_files: List[str], dir_name: str, output_path: Union[str, Path], meta_data: Dict) -> bool:
        """Download every file of a virtual directory.

        Args:
            matching_files: Logical filenames under the directory.
            dir_name: The virtual directory name (prefix of each file).
            output_path: Local directory to mirror the tree into.
            meta_data: Metadata dict.

        Returns:
            True only if every single file downloaded successfully.
        """
        output_path = Path(output_path)
        dir_prefix = dir_name.rstrip('/') + '/'

        success_count = 0
        total_count = len(matching_files)

        for file_path in matching_files:
            try:
                # Recreate the relative layout under output_path.
                relative_path = file_path[len(dir_prefix):]
                target_path = output_path / relative_path

                if self._download_single_file(file_path, target_path, meta_data):
                    success_count += 1
                    print(f"✓ 已下载: {file_path} -> {target_path}")
                else:
                    print(f"✗ 下载失败: {file_path}")

            except Exception as e:
                # Keep going: one bad file should not abort the directory.
                print(f"✗ 下载文件 {file_path} 时出错: {e}")

        print(f"目录下载完成: {success_count}/{total_count} 个文件成功")
        return success_count == total_count

    def list_files(self, dir_name: str = "", join_dir_name: bool = True) -> List[str]:
        """List uploaded filenames, optionally restricted to one directory.

        Args:
            dir_name: Virtual directory to filter by; empty lists everything.
            join_dir_name: When True the returned names keep the
                ``dir_name/`` prefix; when False it is stripped.

        Returns:
            Matching filenames (empty list on error).
        """
        try:
            meta_data = self._get_meta_data()
            all_files = list(meta_data["file_to_hash"].keys())

            if dir_name:
                # Normalize to a "/"-terminated prefix for matching.
                dir_name = dir_name.replace("\\", "/")
                if not dir_name.endswith("/"):
                    dir_name += "/"
                all_files = [f for f in all_files if f.startswith(dir_name)]

            if join_dir_name or not dir_name:
                return all_files

            return [f[len(dir_name):] for f in all_files]

        except Exception as e:
            print(f"获取文件列表失败: {e}")
            return []

    def list(self, dir_name: str = '', join_dir_name: bool = True) -> Dict[str, List[str]]:
        """List the immediate children of a virtual directory.

        Args:
            dir_name: Virtual directory to inspect; empty means the root.
            join_dir_name: When True, results keep the ``dir_name/`` prefix.

        Returns:
            ``{"dirs": [...], "files": [...]}`` with the direct
            subdirectories and direct files (both empty on error).
        """
        try:
            meta_data = self._get_meta_data()
            all_files = list(meta_data["file_to_hash"].keys())

            if dir_name:
                # Normalize to a "/"-terminated prefix for matching.
                dir_name = dir_name.replace("\\", "/")
                if not dir_name.endswith("/"):
                    dir_name += "/"
                all_files = [f for f in all_files if f.startswith(dir_name)]

            # Split entries into direct files vs. first-level subdirectories.
            sub_dirs = set()
            file_names = []
            for f in all_files:
                remainder = f[len(dir_name):]
                parts = remainder.split("/")
                if len(parts) == 1:
                    file_names.append(f if join_dir_name else parts[0])
                else:
                    # dir_name is "" or ends with "/", so plain concatenation
                    # is correct and stays platform-independent.
                    sub_dirs.add(dir_name + parts[0] if join_dir_name else parts[0])

            return {
                "dirs": list(sub_dirs),
                "files": file_names
            }

        except Exception as e:
            print(f"获取文件列表失败: {e}")
            return {
                "dirs": [],
                "files": []
            }

    def get_file_info(self, filename: str) -> Optional[Dict]:
        """Return metadata about one uploaded file.

        Args:
            filename: Logical file name to look up.

        Returns:
            Dict with filename, hash, size, local-cache status and the other
            filenames sharing the same content; None if unknown or on error.
        """
        try:
            meta_data = self._get_meta_data()

            if filename not in meta_data["file_to_hash"]:
                return None

            file_hash = meta_data["file_to_hash"][filename]
            same_hash_files = meta_data["hash_to_files"].get(file_hash, [])

            file_size = self._get_file_size(file_hash)

            cache_file = self.cache_dir / f"{file_hash}.bin"

            return {
                "filename": filename,
                "hash": file_hash,
                "size": file_size,
                "has_cache": cache_file.exists(),
                "same_content_files": same_hash_files
            }

        except Exception as e:
            print(f"获取文件信息失败: {e}")
            return None

    def _get_file_size(self, file_hash: str) -> int:
        """Return the stored size of a content blob in bytes (0 if missing).

        Args:
            file_hash: Content hash of the blob.
        """
        try:
            stat = self.client.stat_object(self.bucket, f"{file_hash}.bin")
            return stat.size
        except S3Error:
            # Blob absent (or inaccessible): report zero rather than raise.
            return 0

    def _format_file_size(self, size_bytes: int) -> str:
        """Format a byte count as a human-readable string (e.g. "1.5 MB").

        Args:
            size_bytes: Size in bytes.
        """
        if size_bytes == 0:
            return "0 B"

        size_names = ["B", "KB", "MB", "GB", "TB"]
        i = 0
        while size_bytes >= 1024 and i < len(size_names) - 1:
            size_bytes /= 1024.0
            i += 1

        return f"{size_bytes:.1f} {size_names[i]}"

    def clear_cache(self):
        """Delete and recreate the local download cache directory."""
        try:
            shutil.rmtree(self.cache_dir)
            self.cache_dir.mkdir(parents=True, exist_ok=True)
            print("缓存已清空")
        except Exception as e:
            print(f"清空缓存失败: {e}")

    def delete_file(self, filename: str) -> bool:
        """Delete a file's metadata entry (the content blob is kept).

        Args:
            filename: Logical file name to remove.

        Returns:
            True on success, False if the file is unknown or on error.
        """
        try:
            meta_data = self._get_meta_data()

            if filename not in meta_data["file_to_hash"]:
                # FIX: show the requested name instead of a placeholder.
                print(f"文件不存在: {filename}")
                return False

            # Reuse the shared helper so delete_file and delete_directory
            # maintain hash_to_files identically.
            self._delete_single_file_from_meta(filename, meta_data)

            self._upload_meta_data(meta_data)
            print(f"文件已删除: {filename}")
            return True

        except Exception as e:
            print(f"删除文件失败: {e}")
            return False

    def delete_directory(self, dirname: str) -> bool:
        """Delete a virtual directory's metadata (content blobs are kept).

        Args:
            dirname: Virtual directory name to remove.

        Returns:
            True if at least one file entry was removed, False otherwise.
        """
        try:
            meta_data = self._get_meta_data()

            dir_prefix = dirname.rstrip('/') + '/'
            matching_files = [f for f in meta_data["file_to_hash"] if f.startswith(dir_prefix)]

            if not matching_files:
                print(f"目录不存在或为空: {dirname}")
                return False

            deleted_count = 0
            for filename in matching_files:
                if self._delete_single_file_from_meta(filename, meta_data):
                    deleted_count += 1

            self._upload_meta_data(meta_data)
            print(f"目录已删除: {dirname} ({deleted_count} 个文件)")
            return deleted_count > 0

        except Exception as e:
            print(f"删除目录失败: {e}")
            return False

    def _delete_single_file_from_meta(self, filename: str, meta_data: Dict) -> bool:
        """Remove one file from both metadata maps.

        Args:
            filename: Logical file name.
            meta_data: Metadata dict, updated in place (caller persists it).

        Returns:
            True if the file existed and was removed, False otherwise.
        """
        if filename not in meta_data["file_to_hash"]:
            return False

        file_hash = meta_data["file_to_hash"][filename]

        del meta_data["file_to_hash"][filename]

        if file_hash in meta_data["hash_to_files"]:
            if filename in meta_data["hash_to_files"][file_hash]:
                meta_data["hash_to_files"][file_hash].remove(filename)

            # Drop the hash entry entirely once no filename references it.
            if not meta_data["hash_to_files"][file_hash]:
                del meta_data["hash_to_files"][file_hash]

        return True

    def rename_file(self, old_filename: str, new_filename: str) -> bool:
        """Rename a file (metadata only; the content blob is untouched).

        Args:
            old_filename: Current logical name.
            new_filename: Desired logical name (must not already exist).

        Returns:
            True on success, False on a missing source, a conflict, or error.
        """
        try:
            meta_data = self._get_meta_data()

            if old_filename not in meta_data["file_to_hash"]:
                print(f"文件不存在: {old_filename}")
                return False

            if new_filename in meta_data["file_to_hash"]:
                print(f"目标文件名已存在: {new_filename}")
                return False

            # Reuse the shared helper so rename_file and rename_directory
            # maintain hash_to_files identically.
            self._rename_single_file_in_meta(old_filename, new_filename, meta_data)

            self._upload_meta_data(meta_data)
            print(f"文件已重命名: {old_filename} -> {new_filename}")
            return True

        except Exception as e:
            print(f"重命名文件失败: {e}")
            return False

    def rename_directory(self, old_dirname: str, new_dirname: str) -> bool:
        """Rename a virtual directory (metadata only).

        Args:
            old_dirname: Current directory name.
            new_dirname: Desired directory name.

        Returns:
            True if at least one file entry was renamed, False otherwise.
        """
        try:
            meta_data = self._get_meta_data()

            old_prefix = old_dirname.rstrip('/') + '/'
            new_prefix = new_dirname.rstrip('/') + '/'
            matching_files = [f for f in meta_data["file_to_hash"] if f.startswith(old_prefix)]

            if not matching_files:
                print(f"目录不存在或为空: {old_dirname}")
                return False

            # Refuse the whole rename if any target name already exists.
            for old_file in matching_files:
                new_file = new_prefix + old_file[len(old_prefix):]
                if new_file in meta_data["file_to_hash"]:
                    print(f"重命名会产生冲突，目标文件已存在: {new_file}")
                    return False

            renamed_count = 0
            for old_file in matching_files:
                new_file = new_prefix + old_file[len(old_prefix):]
                if self._rename_single_file_in_meta(old_file, new_file, meta_data):
                    renamed_count += 1

            self._upload_meta_data(meta_data)
            print(f"目录已重命名: {old_dirname} -> {new_dirname} ({renamed_count} 个文件)")
            return renamed_count > 0

        except Exception as e:
            print(f"重命名目录失败: {e}")
            return False

    def _rename_single_file_in_meta(self, old_filename: str, new_filename: str, meta_data: Dict) -> bool:
        """Rename one file inside both metadata maps.

        Args:
            old_filename: Current logical name.
            new_filename: New logical name.
            meta_data: Metadata dict, updated in place (caller persists it).

        Returns:
            True if the source existed and was renamed, False otherwise.
        """
        if old_filename not in meta_data["file_to_hash"]:
            return False

        file_hash = meta_data["file_to_hash"][old_filename]

        meta_data["file_to_hash"][new_filename] = file_hash
        del meta_data["file_to_hash"][old_filename]

        if file_hash in meta_data["hash_to_files"]:
            if old_filename in meta_data["hash_to_files"][file_hash]:
                meta_data["hash_to_files"][file_hash].remove(old_filename)
                meta_data["hash_to_files"][file_hash].append(new_filename)

        return True


def _main() -> None:
    """Manual smoke test: print the bucket root and one sample directory."""
    storage = HashStorage()
    print(storage.list())
    print(storage.list('third_packages/TensorRT-10.8.0.43/samples/trtexec'))


if __name__ == "__main__":
    _main()