import aiohttp
import asyncio
import os
import json
import hashlib
from tqdm import tqdm

from config import CLIENT_FILE_PATH, CLIENT_TEMP_CHUNK_PATH, DOWNLOAD_BASE_URL


async def download_chunk(session, file_url, local_filename, chunk, chunk_index, lock, downloaded_chunks, total_size, progress_bar, semaphore):
    """Download one byte-range chunk of *file_url* into a temp chunk file.

    Sends a Range request for chunk['start']..chunk['end'], streams the body
    into CLIENT_TEMP_CHUNK_PATH/<local_filename>_filechunk_<chunk_index>, then
    records the chunk index in *downloaded_chunks* and persists the local
    range.json. *semaphore* bounds how many chunk downloads run concurrently;
    *lock* serializes updates to the shared progress state. *total_size* is
    unused here but kept for interface compatibility with existing callers.
    """
    async with semaphore:  # bound the number of concurrent chunk downloads
        start = chunk['start']
        end = chunk['end']

        # Request only this chunk's byte range.
        headers = {'Range': f'bytes={start}-{end}'}
        async with session.get(file_url, headers=headers) as response:
            if response.status != 206:
                # Server did not honor the Range request for this chunk.
                print(f"Failed to download chunk {chunk_index}. Status code: {response.status}")
                return

            # Stream the chunk body to its own temp file.
            chunk_filename = f"{local_filename}_filechunk_{chunk_index}"
            with open(os.path.join(CLIENT_TEMP_CHUNK_PATH, chunk_filename), 'wb') as f:
                while True:
                    chunk_data = await response.content.read(8192)
                    if not chunk_data:
                        break
                    f.write(chunk_data)
                    # Advance the shared progress bar by the bytes written.
                    progress_bar.update(len(chunk_data))

            # Bug fix: the original released the lock BEFORE calling
            # update_local_range_json, so two tasks could interleave their
            # append + JSON write and one task's progress record could clobber
            # the other's. Mutate the shared list and persist it while still
            # holding the lock.
            async with lock:
                downloaded_chunks.append(chunk_index)
                update_local_range_json(local_filename, downloaded_chunks)


# Persist download progress into the local range.json sidecar file.
def update_local_range_json(local_filename, downloaded_chunks):
    """Overwrite the 'downloaded_chunks' field of <local_filename>.json.

    Silently does nothing when the sidecar file does not exist yet.
    """
    meta_path = os.path.join(CLIENT_FILE_PATH, local_filename + '.json')
    if not os.path.exists(meta_path):
        return
    with open(meta_path, 'r') as fp:
        meta = json.load(fp)
    meta['downloaded_chunks'] = downloaded_chunks
    with open(meta_path, 'w') as fp:
        json.dump(meta, fp, indent=4)


def calculate_file_hash(filepath):
    """Return the hex MD5 digest of the file at *filepath*.

    The file is read in 4 KiB blocks so arbitrarily large files can be
    hashed without loading them into memory at once.
    """
    digest = hashlib.md5()
    with open(filepath, "rb") as fh:
        while True:
            block = fh.read(4096)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()


# Verify a file's hash against an expected value.
def verify_file_hash(filepath, expected_hash):
    """Return True iff the MD5 digest of *filepath* equals *expected_hash*.

    Consistency fix: the original duplicated the hashing loop from
    calculate_file_hash byte-for-byte; delegating keeps the algorithm and
    read-block size defined in exactly one place.
    """
    return calculate_file_hash(filepath) == expected_hash


async def download_file(file_url, local_filename):
    """Download *file_url* into CLIENT_FILE_PATH/<local_filename> with
    resumable, concurrent, chunked transfers.

    Flow: fetch the server's range.json metadata, reconcile it with the local
    progress record, download the missing chunks concurrently, merge the chunk
    files in order, then verify the MD5 hash and persist final metadata.
    """
    # First request: fetch the range.json metadata from the server.
    async with aiohttp.ClientSession() as session:
        async with session.get(file_url) as response:
            if response.status != 200:
                print(f"Failed to fetch {local_filename} range.json from server_rec_file")
                return
            range_data = await response.json()

    # Bail out early if the server cannot serve byte ranges.
    if not range_data.get("supports_resume", False):
        print("Server does not support resume.")
        return

    # Load the local progress record, or initialise one from the server data.
    local_range_json_path = os.path.join(CLIENT_FILE_PATH, local_filename + '.json')
    if os.path.exists(local_range_json_path):
        with open(local_range_json_path, 'r') as f:
            local_range_json = json.load(f)
    else:
        local_range_json = {
            # Bug fix: the original defaulted the filename to the integer 0;
            # an empty string is the sensible missing-value for a name field.
            "filename": range_data.get("filename", ""),
            "supports_resume": range_data.get("supports_resume", False),
            "file_size": range_data.get("file_size", 0),
            "total_chunks": range_data.get("chunk_count", 0),
            "downloaded_chunks": [],
            "file_hash": range_data.get("file_hash", ""),   # server-reported hash
            "download_accomplished": False                   # completion flag
        }
        with open(local_range_json_path, 'w') as f:
            json.dump(local_range_json, f, indent=4)

    # Bug fix: the original treated "final file missing" the same as "hash
    # mismatch" — for every partially-downloaded file (the normal resume case,
    # where the merged file does not exist yet) it printed a misleading
    # "hash verification failed" message and wiped downloaded_chunks,
    # destroying all resume progress. Only discard progress when a complete
    # file exists on disk but fails verification.
    final_path = os.path.join(CLIENT_FILE_PATH, local_filename)
    if os.path.exists(final_path):
        if verify_file_hash(final_path, local_range_json['file_hash']):
            print(f"{local_filename} already downloaded and verified.")
            return
        print(f"{local_filename} already downloaded but hash verification failed. Redownloading...")
        local_range_json['downloaded_chunks'] = []          # discard stale progress
        local_range_json["download_accomplished"] = False   # reset completion flag

    # Total file size drives the progress bar.
    total_size = local_range_json['file_size']
    progress_bar = tqdm(total=total_size, unit='B', unit_scale=True, desc="Downloading")

    # Shared state for the concurrent chunk downloads.
    lock = asyncio.Lock()
    downloaded_chunks = local_range_json['downloaded_chunks']
    semaphore = asyncio.Semaphore(2)  # at most 2 chunk downloads in flight

    try:
        async with aiohttp.ClientSession() as session:
            tasks = []
            for chunk_index, chunk in enumerate(range_data['chunks']):
                if chunk_index in downloaded_chunks:
                    # Chunk already fetched in a previous run; just advance the bar.
                    progress_bar.update(chunk['end'] - chunk['start'] + 1)
                    continue
                tasks.append(asyncio.create_task(download_chunk(
                    session, file_url, local_filename, chunk, chunk_index,
                    lock, downloaded_chunks, total_size, progress_bar, semaphore)))
            await asyncio.gather(*tasks)
    finally:
        # Bug fix: the original leaked the progress bar if any task raised;
        # always close it.
        progress_bar.close()

    # Merge all chunk files, in index order, into the final file.
    with open(final_path, 'wb') as final_file:
        for chunk_index in range(len(range_data['chunks'])):
            chunk_filename = os.path.join(CLIENT_TEMP_CHUNK_PATH, f"{local_filename}_filechunk_{chunk_index}")
            if os.path.exists(chunk_filename):
                with open(chunk_filename, 'rb') as chunk_file:
                    final_file.write(chunk_file.read())
                os.remove(chunk_filename)  # delete chunk once merged
            else:
                print(f"Chunk {chunk_index} is missing!")

    # Verify the merged file's hash and persist the final metadata.
    if verify_file_hash(final_path, local_range_json['file_hash']):
        local_range_json["download_accomplished"] = True
        print(f"File integrity verified: {local_filename}")
        print(f"localfile_hash: {local_range_json['file_hash']}")
        print(f"servefile_hash: {calculate_file_hash(final_path)}")
    else:
        local_range_json["download_accomplished"] = False
        print(f"File integrity check failed: {local_filename}")
    with open(local_range_json_path, 'w') as f:
        json.dump(local_range_json, f, indent=4)


def rx_file(file_baseurl, file_name):
    """Ensure the client directories exist, then download *file_name* from *file_baseurl*.

    Synchronous entry point: drives the async download with asyncio.run.
    """
    # Bug fix: the original's exists()-then-makedirs() pattern is a TOCTOU
    # race — another process creating the directory between the check and the
    # call raises FileExistsError. exist_ok=True makes creation idempotent.
    os.makedirs(CLIENT_FILE_PATH, exist_ok=True)
    os.makedirs(CLIENT_TEMP_CHUNK_PATH, exist_ok=True)
    asyncio.run(download_file(file_baseurl, file_name))


if __name__ == '__main__':
    # Demo entry point: fetch one sample binary file from the configured server.
    target_name = 'random_binary_file.bin'
    target_url = DOWNLOAD_BASE_URL + target_name
    rx_file(target_url, target_name)
