import asyncio
from datetime import datetime
import websockets
import json
import os
import httpx  # 替换 aiohttp
import basicFuncation

# Directory containing the files this client can serve to the server.
CLIENT_FILES_DIR = os.path.join('E:\\project\\python\\DataStream\\Upload', "client_files")
# Directory caching per-file range/hash metadata as "<file_name>.json".
CLIENT_RANGE_DIR = os.path.join('E:\\project\\python\\DataStream\\Upload', "client_range")
CHUNKS_SIZE = (16/1024)  # chunk size in MB: 16/1024 MB -> 16 KiB per chunk (see int(CHUNKS_SIZE * 1024 * 1024))
MAX_CONCURRENT_UPLOADS = 1  # maximum number of concurrent upload worker coroutines
TARGET_IP = "127.0.0.1"  # control-server address (WebSocket on port 8765)

pause_event = asyncio.Event()  # set -> upload workers should pause
resume_event = asyncio.Event()  # set -> paused upload workers may resume


# WebSocket control channel: answers the server's requests (file info,
# file tree, pause/resume) and spawns background upload tasks.
async def connect_to_server():
    """Connect to the control server and service its requests forever.

    Reconnects after a 1 s delay whenever the connection drops or a
    handler raises.  Recognised message types: request_file,
    request_file_tree, require_file_pause, require_file_resume.
    """
    url = f"ws://{TARGET_IP}:8765"
    global pause_event, resume_event
    # Strong references keep background upload tasks alive: asyncio only
    # holds weak refs to tasks, so an unreferenced task can be GC'd mid-flight.
    background_tasks = set()
    while True:
        try:
            async with websockets.connect(url) as websocket:
                print("Connected to server_rec_file")
                # Connected: handle one server message per iteration.
                while True:
                    message = await websocket.recv()
                    message = json.loads(message)

                    # Situation 1: the server requests a file.
                    if message['type'] == 'request_file':
                        file_name = message['file_name']
                        filepath = os.path.join(CLIENT_FILES_DIR, file_name)
                        if not os.path.exists(filepath):
                            # File missing: report and wait for the next request.
                            await websocket.send(json.dumps({'type': 'file_info',
                                                             'file_exists': False,
                                                             'message': f'File not found',
                                                             'range_data': None}))
                        else:
                            print(datetime.now().strftime("[%H:%M:%S,%f]"), f"命中文件: {filepath}")
                            # Build the local range description (chunk offsets, no hashes yet).
                            local_range_json = basicFuncation.generate_range_file(file_name, generate_hash=False,
                                                                                  chunk_size=int(CHUNKS_SIZE * 1024 * 1024))
                            local_range_json_path = os.path.join(CLIENT_RANGE_DIR, file_name + '.json')
                            if os.path.exists(local_range_json_path):
                                # Reuse the cached whole-file hash instead of re-hashing.
                                with open(local_range_json_path, 'r') as f:
                                    file_hash = json.load(f)['file_hash']
                                local_range_json['file_hash'] = file_hash
                            else:
                                file_hash = basicFuncation.calculate_file_hash(os.path.join(CLIENT_FILES_DIR, file_name))
                                local_range_json['file_hash'] = file_hash
                                with open(local_range_json_path, 'w') as f:
                                    json.dump(local_range_json, f, indent=4)
                            await websocket.send(json.dumps({'type': 'file_info',
                                                             'file_exists': True,
                                                             'message': f'File found: {file_name}',
                                                             'range_data': local_range_json}))
                            reply = await websocket.recv()
                            reply = json.loads(reply)
                            if reply['type'] == 'server_range_json':
                                # Local import: presumably avoids a circular import
                                # at module load — TODO confirm against relay_server.
                                from relay_server import RELAY_PORT
                                sendto_url = reply['sendto_url'].replace(':5000', f':{RELAY_PORT}')
                                server_range_json = reply['server_range_json']
                                # Upload in the background; hold a reference so the
                                # task is not garbage-collected before it finishes.
                                upload_task = asyncio.create_task(
                                    upload_file_with_resume(file_name, sendto_url,
                                                            server_range_json, local_range_json))
                                background_tasks.add(upload_task)
                                upload_task.add_done_callback(background_tasks.discard)
                            else:
                                # BUG FIX: the original fell through and used unbound
                                # sendto_url / server_range_json -> NameError.
                                print(datetime.now().strftime("[%H:%M:%S,%f]"),
                                      f"Expected server_range_json, got: {reply['type']}")

                    # Situation 2: the server requests the file tree.
                    elif message['type'] == 'request_file_tree':
                        directory_structure = basicFuncation.traverse_directory_to_dict(CLIENT_FILES_DIR)
                        await websocket.send(json.dumps(directory_structure, indent=4, ensure_ascii=False))

                    # Situation 3: pause the upload workers.
                    elif message['type'] == 'require_file_pause':
                        print(datetime.now().strftime("[%H:%M:%S,%f]"), f"File Upload paused")
                        pause_event.set()
                        resume_event.clear()

                    # Situation 4: resume the upload workers.
                    elif message['type'] == 'require_file_resume':
                        print(datetime.now().strftime("[%H:%M:%S,%f]"), f"File Upload resumed")
                        pause_event.clear()
                        resume_event.set()

                    # Situation 5: unknown message type.
                    else:
                        await websocket.send(json.dumps({'type': 'error', 'message': 'Unknown message type'}))
                        print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Unknown message type: {message['type']}")

        except Exception as e:
            print(f"Error: {e}")
        finally:
            print("")
        await asyncio.sleep(1)  # wait 1 s, then reconnect


async def upload_file(url, client, chunk_data, metadata):
    """POST one file chunk plus its JSON-encoded metadata to *url*.

    Returns a tuple of (parsed JSON response body, HTTP status code).
    """
    # Multipart payload: raw chunk bytes plus metadata as a JSON form field.
    payload_files = {'file': chunk_data}
    payload_form = {'metadata': json.dumps(metadata)}
    response = await client.post(url, files=payload_files, data=payload_form)
    return response.json(), response.status_code


async def upload_chunk(url, client, file_path, chunk_index, chunk_range, chunk_queue, server_range_json):
    """Upload one chunk of *file_path* (no semaphore needed — callers limit concurrency).

    chunk_range: {'start': int, 'end': int} with INCLUSIVE byte offsets.
    On timeout the chunk is moved to server_range_json['error_chunks'];
    on any other failure it is put back on *chunk_queue* for retry.
    """
    start = chunk_range['start']
    end = chunk_range['end'] + 1  # convert inclusive end to half-open bound
    chunk_size = end - start

    file_metadata = {
        'chunk_index': chunk_index,
        'file_seek': start,
        'chunk_file_hash': 0,  # per-chunk hashing not implemented yet
    }

    try:
        with open(file_path, 'rb') as file:
            file.seek(start)
            chunk_stream = file.read(chunk_size)
            files = {'file': ('chunk', chunk_stream)}
            data = {'metadata': json.dumps(file_metadata)}
            response = await client.post(url, files=files, data=data, timeout=5.0)
            result = response.json()
            status_code = response.status_code

            if status_code == 200:
                print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Chunk {chunk_index} uploaded successfully",
                      f"{len(server_range_json['downloaded_chunks']) / server_range_json['total_chunks'] * 100}%")
            elif status_code == 400:
                print(datetime.now().strftime("[%H:%M:%S,%f]"), result['message'])
    except (httpx.TimeoutException, asyncio.TimeoutError):
        # BUG FIX: httpx raises httpx.TimeoutException (not asyncio.TimeoutError),
        # so the original timeout branch was unreachable and timed-out chunks fell
        # into the generic handler below. Record the timeout in error_chunks.
        print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Chunk {chunk_index} upload timeout")
        if chunk_index in server_range_json['downloaded_chunks']:
            server_range_json['downloaded_chunks'].remove(chunk_index)
        server_range_json.setdefault('error_chunks', []).append(chunk_index)
    except Exception as e:
        # Non-timeout failure: re-queue the chunk for another attempt.
        print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Chunk {chunk_index} failed: {e}")
        await chunk_queue.put(chunk_index)


async def upload_file_with_resume(filename, sendto_url, server_range_json, local_range_json):
    """Upload every chunk of *filename* the server has not already confirmed.

    server_range_json['downloaded_chunks'] is mutated in place as chunks
    succeed; the module-level pause_event / resume_event gate the workers.
    Returns None; prints progress and a final success/failure line.
    """
    global pause_event, resume_event
    file_path = os.path.join(CLIENT_FILES_DIR, filename)
    total_chunks = local_range_json['chunk_count']
    chunks = local_range_json['chunks']
    downloaded_chunks = server_range_json['downloaded_chunks']
    downloaded_chunks_lock = asyncio.Lock()  # guards downloaded_chunks mutation

    # Work queue of chunk indices still to upload.
    chunk_queue = asyncio.Queue()

    # Seed the queue with every chunk the server has not confirmed yet.
    for chunk_index in range(total_chunks):
        if chunk_index not in downloaded_chunks:
            await chunk_queue.put(chunk_index)

    # Worker coroutine: drains the queue, honouring pause/resume events.
    async def worker(worker_id):
        while not chunk_queue.empty():
            # Honour a pause request before taking more work.
            # NOTE(review): a paused worker waits until resume_event is set by
            # the control channel; if it never is, the worker blocks forever.
            if pause_event.is_set():
                print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Worker {worker_id} paused")
                await resume_event.wait()
                print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Worker {worker_id} resumed")

            # Take the next chunk index off the queue.
            chunk_index = await chunk_queue.get()

            try:
                # Upload the chunk with a fresh HTTP client per chunk.
                # print(f"Uploading to URL: {sendto_url}")  # debug
                try:
                    async with httpx.AsyncClient() as client:
                        await upload_chunk(sendto_url, client, file_path, chunk_index, chunks[chunk_index], chunk_queue, server_range_json)
                except Exception as e:
                    print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Error in AsyncClient: {e}")
                    raise  # re-raise so the outer handler re-queues this chunk

                # Mark the chunk as uploaded (dedup under the lock).
                async with downloaded_chunks_lock:
                    if chunk_index not in server_range_json['downloaded_chunks']:
                        server_range_json['downloaded_chunks'].append(chunk_index)
                        # Report overall progress.
                        progress = len(server_range_json['downloaded_chunks']) / total_chunks * 100
                        print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Progress: {progress:.2f}%")

            except Exception as e:
                # Upload failed: put the chunk back for another attempt.
                # NOTE(review): a permanently failing chunk retries forever.
                print(datetime.now().strftime("[%H:%M:%S,%f]"), f"Chunk {chunk_index} failed: {e}")
                await chunk_queue.put(chunk_index)
            finally:
                chunk_queue.task_done()

    # Start MAX_CONCURRENT_UPLOADS worker tasks.
    workers = [
        asyncio.create_task(worker(i))
        for i in range(MAX_CONCURRENT_UPLOADS)
    ]

    # Block until every queued item has been task_done()'d.
    await chunk_queue.join()

    # Tear down the worker tasks.
    for w in workers:
        w.cancel()

    # Reset pause state and report the final result.
    pause_event.clear()
    resume_event.set()
    if len(server_range_json['downloaded_chunks']) == total_chunks:
        print(datetime.now().strftime("[%H:%M:%S,%f]"), f"上传成功：File uploaded successfully")
    else:
        print(datetime.now().strftime("[%H:%M:%S,%f]"), "File upload failed or incomplete")


if __name__ == '__main__':
    # NOTE: on Windows, some libraries need the selector event loop policy:
    #     asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    # Make sure the client directories exist before serving requests.
    os.makedirs(CLIENT_FILES_DIR, exist_ok=True)
    os.makedirs(CLIENT_RANGE_DIR, exist_ok=True)
    # asyncio.run replaces the deprecated (since 3.10)
    # get_event_loop().run_until_complete pattern and cleans up the loop.
    asyncio.run(connect_to_server())
