from itsdangerous import URLSafeTimedSerializer
from datetime import datetime, timedelta, time
import hashlib
import os


# 连接保持时间
EXPIRES_SECOND = 600


def generate_upload_link(file_name, secret_key="your-secret-key", expires_in=EXPIRES_SECOND):
    """Create a signed, time-limited upload token for *file_name*.

    Args:
        file_name: Name of the file the link grants upload access to.
        secret_key: Key used to sign the token.
        expires_in: Lifetime of the link in seconds (defaults to EXPIRES_SECOND).

    Returns:
        Tuple of (link_id, expires_time): the serialized signed token and
        the datetime at which it expires.
    """
    # Bug fix: sign with the caller-supplied secret_key. Previously the
    # literal string "secret_key" was passed, so the parameter was ignored
    # and every caller signed with the same hard-coded key.
    serializer = URLSafeTimedSerializer(secret_key)
    expires_time = datetime.now() + timedelta(seconds=expires_in)
    # Embed the expiry in the signed payload so it cannot be tampered with
    # client-side; the receiver can also rely on the serializer's timestamp.
    link_id = serializer.dumps({'file_name': file_name, 'expires': expires_time.isoformat()})
    # Return the short link token together with its expiry time
    return link_id, expires_time


def calculate_file_hash(file_path):
    """Return the MD5 hex digest of the file at *file_path*.

    Reads the file in 4 KiB chunks so arbitrarily large files can be
    hashed without loading them fully into memory.
    """
    digest = hashlib.md5()
    with open(file_path, "rb") as fh:
        while chunk := fh.read(4096):
            digest.update(chunk)
    return digest.hexdigest()


# 生成 range.json 文件
# Generate the range.json metadata for a file
def generate_range_file(filename, chunk_size=1024 * 1024, generate_hash=True):
    """Build the range.json metadata describing how *filename* is chunked.

    Args:
        filename: File name relative to CLIENT_FILES_DIR.
        chunk_size: Size of each chunk in bytes (default 1 MiB).
        generate_hash: When True, include the file's MD5 hash (can be
            skipped for speed on large files).

    Returns:
        A dict with file name, size, chunk count, per-chunk byte ranges
        and optional hash — or None when the file does not exist.
    """
    from client import CLIENT_FILES_DIR
    filepath = os.path.join(CLIENT_FILES_DIR, filename)
    if not os.path.exists(filepath):
        return None

    file_size = os.path.getsize(filepath)

    # Split [0, file_size) into inclusive byte ranges of at most chunk_size.
    # An empty file yields zero chunks.
    ranges = []
    start = 0
    while start < file_size:
        end = min(start + chunk_size - 1, file_size - 1)
        ranges.append({'start': start, 'end': end})
        start = end + 1

    # Compute the file hash only when requested
    file_hash = calculate_file_hash(filepath) if generate_hash else None

    # Assemble the range.json content
    range_data = {
        "filename": filename,  # file name
        "supports_resume": True,  # whether resumable transfer is supported
        "file_size": file_size,  # total size in bytes
        "chunk_count": len(ranges),  # number of chunks
        "chunks": ranges,  # byte range of each chunk
        "file_hash": file_hash  # file hash (None when skipped)
    }
    # Bug fix: the f-string previously contained the literal text
    # "(unknown)" and never interpolated the file name.
    print(f"Generated range file for {filename}")
    return range_data


def format_size(size_bytes):
    """Format a byte count as a human-friendly string (bytes, KB, MB or GB)."""
    kb = 1024
    mb = kb * 1024
    gb = mb * 1024
    if size_bytes < kb:
        return f"{size_bytes} bytes"
    if size_bytes < mb:
        return f"{size_bytes / kb:.2f} KB"
    if size_bytes < gb:
        return f"{size_bytes / mb:.2f} MB"
    return f"{size_bytes / gb:.2f} GB"


def traverse_directory_to_dict(path):
    """Recursively walk *path* and return a nested dict describing its tree.

    Directories become ``{"name", "type": "directory", "children": [...]}``
    nodes; files become ``{"name", "type": "file", "size_bytes",
    "size_formatted"}`` leaves.
    """
    children = []
    for entry in os.listdir(path):
        entry_path = os.path.join(path, entry)
        if os.path.isdir(entry_path):
            # Recurse into subdirectories
            children.append(traverse_directory_to_dict(entry_path))
        else:
            # Plain file: record its size in raw and formatted form
            size = os.path.getsize(entry_path)
            children.append({
                "name": entry,
                "type": "file",
                "size_bytes": size,
                "size_formatted": format_size(size)
            })

    return {
        "name": os.path.basename(path),
        "type": "directory",
        "children": children
    }