import asyncio
from datetime import datetime, timedelta, time
import uvicorn
from fastapi import FastAPI, Request, File, UploadFile, Form, HTTPException
from fastapi.responses import JSONResponse
import logging
import os
import json
import ControlSql as msql
import basicFuncation
import aiofiles


class MicrosecondFormatter(logging.Formatter):
    """logging.Formatter that supports microseconds (%f) in datefmt.

    The stock Formatter ignores %f because it formats via time.strftime;
    this subclass routes all formatting through datetime.strftime instead.

    NOTE: the original implementation called ``time.strftime(...)``, but this
    module imports ``time`` from ``datetime`` (the time-of-day class), which
    shadows the ``time`` module and made those calls raise TypeError. Using
    datetime.fromtimestamp for every branch fixes that and keeps the output
    identical for formats without %f.
    """

    def formatTime(self, record, datefmt=None):
        # record.created is a POSIX timestamp (float, sub-second precision).
        dt = datetime.fromtimestamp(record.created)
        if datefmt:
            # datetime.strftime handles %f (microseconds) as well as every
            # directive time.strftime supports, so one path covers both cases.
            return dt.strftime(datefmt)
        # Default format: "YYYY-mm-dd HH:MM:SS,mmm" (milliseconds), matching
        # logging.Formatter's default output.
        t = dt.strftime("%Y-%m-%d %H:%M:%S")
        return "%s,%03d" % (t, record.msecs)


# 配置 logging
# Configure logging: microsecond-capable formatter on a stream handler,
# attached to the root logger so all module logging flows through it.
formatter = MicrosecondFormatter(
    fmt="[%(asctime)s] %(levelname)s: %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S,%f"  # includes microseconds
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)

logger = logging.getLogger()  # root logger; the rest of this file logs via this name
logger.setLevel(logging.INFO)
logger.addHandler(handler)

app = FastAPI()

# Directory where assembled uploads are stored.
# NOTE(review): absolute Windows path is hard-coded — confirm deployment target.
UPLOAD_FOLDER = os.path.join('E:\\project\\python\\DataStream\\Upload', 'server_files')
TEMP_FOLDER = os.path.join('temp')

# Make sure both directories exist
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
os.makedirs(TEMP_FOLDER, exist_ok=True)

# Per-upload-link state ({link_id: {'lock': asyncio.Lock(), 'info': {...}}})
# and the lock that serializes creation of entries in it.
downloaded_chunks_info = {}
downloaded_enter_lock = asyncio.Lock()

# In-memory chunk cache
memory_cache = {}  # chunk contents: {file_hash: {chunk_index: content}}
chunk_timestamps = {}  # chunk write times: {file_hash: {chunk_index: timestamp}}
cache_used = 0  # current cache usage in bytes
MAX_CACHE_SIZE = 32 * 1024 * 1024  # 32MB memory cache
MEMORY_THRESHOLD = 0.75  # usable fraction of MAX_CACHE_SIZE before eviction kicks in
cache_lock = asyncio.Lock()  # guards memory_cache / chunk_timestamps / cache_used

# Global file-handle registry
file_handles = {}  # open async handles: {file_hash: file_handle}


async def get_file_handle(file_hash: str):
    """
    Return the cached async file handle for *file_hash*, opening it on first use.

    The file lives at UPLOAD_FOLDER/file_hash and is opened in "r+b" for
    random-access writes at arbitrary offsets. "r+b" raises FileNotFoundError
    when the file does not yet exist (the original code hit exactly that on a
    fresh upload), so an empty file is created first when needed.
    """
    if file_hash not in file_handles:
        file_path = os.path.join(UPLOAD_FOLDER, file_hash)
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        if not os.path.exists(file_path):
            # Touch the file so the "r+b" open below cannot fail on a new upload.
            with open(file_path, "wb"):
                pass
        file_handles[file_hash] = await aiofiles.open(file_path, "r+b")
    return file_handles[file_hash]


async def close_file_handle(file_hash: str):
    """
    Close the cached handle for *file_hash* and drop it from the registry.

    A no-op when no handle is registered for that hash.
    """
    try:
        handle = file_handles[file_hash]
    except KeyError:
        return
    await handle.close()
    del file_handles[file_hash]


async def cache_chunk(file_hash: str, chunk_index: int, chunk_size: int, content: bytes):
    """
    Store one uploaded chunk in the in-memory cache.

    If adding the chunk would push usage past the memory threshold, older
    cached chunks are evicted (flushed to disk) first. All cache structures
    are mutated under cache_lock.
    """
    global cache_used
    async with cache_lock:
        # Would this chunk overflow the allowed cache budget?
        projected = cache_used + len(content)
        if projected > MAX_CACHE_SIZE * MEMORY_THRESHOLD:
            # Not enough headroom — flush older chunks to disk first.
            await evict_cache(chunk_size)

        # Insert the chunk and stamp its arrival time.
        if file_hash not in memory_cache:
            memory_cache[file_hash] = {}
            chunk_timestamps[file_hash] = {}
        memory_cache[file_hash][chunk_index] = content
        chunk_timestamps[file_hash][chunk_index] = datetime.now().timestamp()
        cache_used += len(content)


async def evict_cache(chunk_size: int):
    """
    Evict cached chunks until usage falls below the memory threshold.

    Strategy: repeatedly find, across all cached files, the longest run of
    consecutive chunk indices and flush that whole run to disk (sequential
    runs give the most disk-friendly writes). Stops when usage is under the
    threshold or nothing is cached.

    Caller must hold cache_lock (this function mutates the cache structures
    directly, as the original did).

    Fixes over the original: the "oldest chunk" fallback was dead code
    (best_length is >= 1 as soon as any file has a chunk, and the check ran
    after best_length had just been raised), and best_timestamp was assigned
    but never read — both removed.
    """
    global cache_used
    while cache_used > MAX_CACHE_SIZE * MEMORY_THRESHOLD:
        best_candidate = None  # (file_hash, start_index, run_length)
        best_length = 0

        for file_hash, chunk_map in memory_cache.items():
            chunks = sorted(chunk_map.keys())
            if not chunks:
                continue

            # Scan for the longest run of consecutive indices in this file.
            run_start = chunks[0]
            run_length = 1
            longest_start = chunks[0]
            longest_length = 1
            for prev, cur in zip(chunks, chunks[1:]):
                if cur == prev + 1:
                    run_length += 1
                else:
                    run_start = cur
                    run_length = 1
                if run_length > longest_length:
                    longest_length = run_length
                    longest_start = run_start

            # Strict '>' keeps the first file encountered on ties, as before.
            if longest_length > best_length:
                best_length = longest_length
                best_candidate = (file_hash, longest_start, longest_length)

        if not best_candidate:
            # Cache is empty — nothing left to free.
            break

        # Flush the chosen run to disk and drop it from the cache.
        file_hash, start_index, length = best_candidate
        for chunk_index in range(start_index, start_index + length):
            if chunk_index in memory_cache[file_hash]:
                content = memory_cache[file_hash].pop(chunk_index)
                chunk_timestamps[file_hash].pop(chunk_index)
                # write_to_disk also decrements cache_used by len(content).
                await write_to_disk(file_hash, chunk_index, content, chunk_size)


async def write_to_disk(file_hash: str, chunk_index: int, content: bytes, chunk_size: int):
    """
    Write one cached chunk to its fixed byte offset in the destination file.

    The offset is chunk_index * chunk_size; after a successful write the
    chunk's bytes are released from the cache accounting (cache_used).
    """
    global cache_used
    handle = await get_file_handle(file_hash)
    offset = chunk_index * chunk_size
    # Position at the chunk's slot, then write the payload there.
    await handle.seek(offset)
    await handle.write(content)
    cache_used -= len(content)
    logger.info(f"Chunk {chunk_index} of file {file_hash} written to disk {chunk_index * chunk_size}.")


async def finalize_file(file_hash: str, chunk_size: int):
    """
    Flush any remaining cached chunks of *file_hash* to disk and close its handle.

    Called once every chunk of the file has been received. The handle is now
    closed unconditionally: the original only closed it when cached chunks
    still existed, so a file whose chunks had all been evicted to disk kept
    its descriptor open forever (handle leak).
    """
    async with cache_lock:
        # Remove the file's cache entries (may be absent if fully evicted).
        remaining = memory_cache.pop(file_hash, None)
        chunk_timestamps.pop(file_hash, None)

        if remaining:
            # Flush whatever was still held in memory.
            for chunk_index, content in remaining.items():
                await write_to_disk(file_hash, chunk_index, content, chunk_size)

        # Always release the handle, even when nothing was left to flush.
        await close_file_handle(file_hash)


@app.post("/upload/{link_id}")
async def upload_file(link_id: str, file: UploadFile = File(...), metadata: str = Form(...)):
    """
    Receive one chunk of a chunked upload identified by *link_id*.

    The chunk payload arrives as multipart file data; *metadata* is a JSON
    string carrying chunk_index, file_seek and chunk_file_hash. Each chunk is
    cached in memory (spilling to disk under pressure); when the last chunk
    arrives the file is finalized, hash-verified against file_hash, and
    recorded in the local_files table.

    Returns:
        200 per chunk, or for the whole file on success;
        400 when the final file hash does not match;
        404 (HTTPException) when the upload link is unknown.
    """
    # logger.info("file upload started")
    # Serialize creation of per-link state: one lock entry per link
    async with downloaded_enter_lock:
        # First chunk for this link — set up its in-memory state
        if link_id not in downloaded_chunks_info.keys():
            downloaded_chunks_info[link_id] = {}
            # Per-link lock guarding this link's mutable state
            downloaded_chunks_info[link_id]['lock'] = asyncio.Lock()

            # Load the link's parameters from the database
            async with downloaded_chunks_info[link_id]['lock']:
                upload_links = msql.get_upload_link(link_id)
                if not upload_links:
                    raise HTTPException(status_code=404, detail="Link not found")
                else:
                    # Keep the link data in memory to reduce database traffic
                    downloaded_chunks_info[link_id]['info'] = upload_links

    filename = downloaded_chunks_info[link_id]['info']['file_name']
    file_hash = downloaded_chunks_info[link_id]['info']['file_hash']
    file_size = downloaded_chunks_info[link_id]['info']['file_size']
    chunk_count = downloaded_chunks_info[link_id]['info']['file_chunk']
    # NOTE(review): assumes chunks are equally sized; if file_size is not an
    # exact multiple of chunk_count the last chunk's offset is wrong — confirm
    # against the client's chunking scheme.
    chunk_size = int(file_size / chunk_count)

    # Parse the per-chunk metadata (JSON string from the form field)
    file_metadata = json.loads(metadata)
    chunk_index = int(file_metadata['chunk_index'])
    chunk_start = int(file_metadata['file_seek'])
    chunk_file_hash = file_metadata['chunk_file_hash']

    # Guard this link's shared state with its per-link lock
    async with downloaded_chunks_info[link_id]['lock']:
        # Refresh the link's expiry window
        from basicFuncation import EXPIRES_SECOND
        downloaded_chunks_info[link_id]['info']['url_using'] = True
        downloaded_chunks_info[link_id]['info']['expires_at'] = datetime.now() + timedelta(seconds=EXPIRES_SECOND)

    # logger.info("chunk received")
    # Cache the chunk content in memory
    file_content = await file.read()
    await cache_chunk(file_hash, chunk_index, chunk_size, file_content)

    # Record the chunk as downloaded
    async with downloaded_chunks_info[link_id]['lock']:
        downloaded_chunks = json.loads(downloaded_chunks_info[link_id]['info']['downloaded_chunks'])
        # NOTE(review): duplicates are not filtered — a retransmitted chunk
        # inflates the count and could mark the file complete early; verify
        # the client never resends a chunk.
        downloaded_chunks.append(chunk_index)
        downloaded_chunks_info[link_id]['info']['downloaded_chunks'] = json.dumps(downloaded_chunks)

        # Is every chunk now uploaded?
        if len(downloaded_chunks) == chunk_count:
            logger.info("全部chunk上传完成，开始写入硬盘")
            await finalize_file(file_hash, chunk_size)
            # Verify the assembled file against the expected hash
            rec_file_path = os.path.join(UPLOAD_FOLDER, file_hash)
            rec_file_hash = basicFuncation.calculate_file_hash(rec_file_path)
            if rec_file_hash == file_hash:
                # Upload complete — record it in the local_file database table
                logger.info(f'文件 (unknown) 上传成功')
                if msql.get_local_files(search_data=file_hash) is not None:
                    msql.update_local_files(file_hash=file_hash, file_exist=True)
                else:
                    msql.update_local_files(file_hash=file_hash,
                                            file_name=filename,
                                            file_exist=True,
                                            file_size=file_size,
                                            file_path=rec_file_path,
                                            upload_time=datetime.now())
                msql.update_upload_link(file_hash, expires_at=downloaded_chunks_info[link_id]['info']['expires_at'],
                                        url_using=False, downloaded_chunks=downloaded_chunks_info[link_id]['info']['downloaded_chunks'])
                del downloaded_chunks_info[link_id]  # drop the upload link state
                return JSONResponse(content={"message": "File uploaded successfully", "rec_file_path": rec_file_path,
                                             "chunk_index": chunk_index}, status_code=200)
            else:
                logger.info(f'文件(unknown)哈希不匹配')
                del downloaded_chunks_info[link_id]  # drop the upload link state
                return JSONResponse(content={"message": "File hash mismatch", "rec_file_hash": rec_file_hash},
                                    status_code=400)
        # Not the last chunk — acknowledge this one
        else:
            logger.info(f'文件chunk：{chunk_index} 上传成功')
            return JSONResponse(content={"message": "Chunk uploaded successfully", "chunk_index": chunk_index},
                                status_code=200)


async def monitor_upload_info():
    """
    Background task: periodically persist upload-link state to SQLite.

    Every 0.5s, writes each in-memory link's expires_at and downloaded_chunks
    back to the database. The sleep now lives in ``finally`` so a recurring
    database error no longer turns the loop into a busy error-logging spin
    (the original only slept on the success path). The pointless ``global``
    declaration of the never-reassigned lock was removed.
    """
    while True:
        try:
            async with downloaded_enter_lock:
                # Snapshot items with list() so upload handlers may add or
                # delete links while we iterate.
                for link_id, upload_links_info in list(downloaded_chunks_info.items()):
                    async with upload_links_info['lock']:
                        info = upload_links_info['info']
                        msql.update_upload_link(info['file_hash'],
                                                expires_at=info['expires_at'],
                                                url_using=False,
                                                downloaded_chunks=info['downloaded_chunks'])
        except Exception as e:
            logger.error(f"监控上传链接信息时发生错误: {e}")
        finally:
            # Sleep on every iteration — success or failure — before rechecking.
            await asyncio.sleep(0.5)


@app.get("/upload/{link_id}/upload_progress")
async def get_upload_progress(link_id: str):
    """
    Report upload progress (percent of chunks received) for *link_id*.

    Progress is computed purely from the link's persisted server_range_json,
    so no in-memory lock is required. The original unconditionally indexed
    ``downloaded_chunks_info[link_id]``, raising KeyError (HTTP 500) for any
    link not currently tracked in memory — e.g. a completed upload or after a
    restart — even though the database row exists; it could also divide by
    zero when total_chunks was 0. Both are fixed here.
    """
    sql_data = msql.get_upload_link(link_id, search_by='link_id')
    if sql_data is None:
        return JSONResponse(content={"error": "File not found"}, status_code=404)

    server_range_json = json.loads(sql_data['server_range_json'])
    downloaded_chunks = server_range_json['downloaded_chunks']
    total_chunks = server_range_json['total_chunks']
    if not total_chunks:
        # No chunks expected yet — report zero instead of dividing by zero.
        return JSONResponse(content={"progress": 0}, status_code=200)
    return JSONResponse(content={"progress": len(downloaded_chunks) / total_chunks * 100}, status_code=200)


async def run_server_FastAPI():
    """Run the FastAPI server with the upload-monitor task in the background."""
    # Keep a reference so the monitor task is not garbage-collected mid-run.
    monitor = asyncio.create_task(monitor_upload_info())

    # Serve the FastAPI app on localhost:5000, logging warnings and above.
    server_config = uvicorn.Config(app, host="127.0.0.1", port=5000, log_level="warning")
    await uvicorn.Server(server_config).serve()


if __name__ == "__main__":

    # Start the server (and its monitor task) on a fresh asyncio event loop.
    asyncio.run(run_server_FastAPI())


