from fastapi import APIRouter, HTTPException, Query, WebSocket, Request, Response
from fastapi.responses import StreamingResponse
from docker import errors

import docker
import asyncio
import threading
import logging
import gzip
import io
import os
import tempfile

# Reuse uvicorn's logger so messages go through its handlers/formatting.
logger = logging.getLogger("uvicorn")


from app.schemas.docker_images import DockerImageSchema, DockerContainerSchema

# All routes here are mounted under /docker and grouped under the "Docker"
# tag in the generated OpenAPI docs.
router = APIRouter(prefix="/docker", tags=["Docker"])

# Single shared Docker client, configured from the environment (DOCKER_HOST
# etc.) once at import time. NOTE(review): raises at import if no daemon is
# reachable — confirm that is acceptable for app startup.
client = docker.from_env()
# --------------------------images API-----------------------------
@router.get("/getDockerImages")
def list_images():
    """List local Docker images, one entry per (repository, tag) pair.

    Untagged (dangling) images have an empty ``img.tags`` list and are
    skipped, matching the previous behavior.

    Returns:
        list[DockerImageSchema]: one schema per tag; empty list when no
        images exist.
    """
    list_images_result = []
    for img in client.images.list():
        # A single image object may carry several tags; emit one row per tag.
        for tag in img.tags:
            # Split on the LAST colon: tags like "localhost:5000/repo:1.0"
            # contain a registry port, so split(":")[0] would be wrong.
            repository, _, tag_name = tag.rpartition(":")
            list_images_result.append(
                DockerImageSchema(
                    REPOSITORY=repository,
                    TAG=tag_name,
                    IMAGE_ID=img.id,
                    CREATED=img.attrs.get("Created", ""),
                    SIZE=img.attrs.get("Size", 0)
                )
            )
    return list_images_result


# 单个镜像导出并压缩
# Export a single image as a gzip-compressed tar download
@router.get("/images/{image_id}/export")
def export_image(image_id: str):
    """Export one image as a gzipped tar stream.

    The image is first compressed into a temporary file (so Content-Length
    can be reported), then streamed to the client; the temp file is removed
    afterwards — including on errors and client disconnects.

    Raises:
        HTTPException: 404 when the image does not exist.
    """
    try:
        image = client.images.get(image_id)
    except errors.ImageNotFound:
        raise HTTPException(status_code=404, detail=f"镜像 {image_id} 未找到")

    # 1. Compress the docker tar stream into a temp file.
    tar_stream = image.save(named=True)
    temp_file = tempfile.NamedTemporaryFile(delete=False, suffix=".tar.gz")
    temp_path = temp_file.name
    try:
        with gzip.open(temp_file, 'wb') as gz:
            for chunk in tar_stream:
                gz.write(chunk)
        temp_file.close()
    except Exception:
        # Don't leak the temp file when compression fails mid-way.
        temp_file.close()
        os.remove(temp_path)
        raise

    # 2. Known size lets us send Content-Length for download progress bars.
    file_size = os.path.getsize(temp_path)

    # 3. Stream the file; clean up even if the client disconnects and the
    #    generator is closed before reaching the end.
    def file_iterator(path):
        try:
            with open(path, 'rb') as f:
                while chunk := f.read(8192):
                    yield chunk
        finally:
            os.remove(path)

    return StreamingResponse(
        file_iterator(temp_path),
        media_type="application/gzip",
        headers={
            "Content-Disposition": f"attachment; filename={image_id.replace(':', '_')}.tar.gz",
            "Content-Length": str(file_size)
        }
    )



# 批量导出多个镜像
# Export several images in one gzip download
@router.post("/images/export")
def export_multiple_images(image_ids: list[str]):
    """Stream the given images as one gzip-compressed download.

    Unknown image ids are silently skipped (best-effort bundle export).

    NOTE(review): the raw tar streams are concatenated before compression;
    `docker load` typically stops at the first archive's end-of-archive
    blocks — verify the consumer can handle concatenated tars.

    Raises:
        HTTPException: 400 when the id list is empty.
    """
    if not image_ids:
        raise HTTPException(status_code=400, detail="必须提供镜像 ID 列表")

    def gzip_stream():
        buffer = io.BytesIO()
        with gzip.GzipFile(fileobj=buffer, mode='wb') as gz:
            for image_id in image_ids:
                # Narrow try: only the lookup can raise ImageNotFound.
                try:
                    image = client.images.get(image_id)
                except errors.ImageNotFound:
                    continue
                for chunk in image.save(named=True):
                    gz.write(chunk)
                # Flush compressed bytes produced so far and hand them to
                # the response, then reset the in-memory buffer.
                gz.flush()
                yield buffer.getvalue()
                buffer.seek(0)
                buffer.truncate(0)
        # GzipFile.close() (end of the `with`) writes the gzip trailer
        # (CRC32 + size) into `buffer`; without this final yield the
        # download would be a truncated, corrupt gzip file.
        yield buffer.getvalue()

    return StreamingResponse(
        gzip_stream(),
        media_type="application/gzip",
        headers={
            "Content-Disposition": "attachment; filename=docker_images_bundle.tar.gz"
        }
    )


# 支持断点续传（Range 请求）
# Resumable download support (HTTP Range requests)
@router.get("/images/{image_id}/export-range")
async def export_image_range(image_id: str, request: Request):
    """Serve a gzipped image export with HTTP Range (resume) support.

    The compressed file is cached under /tmp keyed by image id.
    NOTE(review): the cache is never invalidated — a rebuilt image with the
    same tag will keep serving the stale file; confirm this is acceptable.

    Raises:
        HTTPException: 404 when the image does not exist.
    """
    try:
        image = client.images.get(image_id)
    except errors.ImageNotFound:
        raise HTTPException(status_code=404, detail=f"镜像 {image_id} 未找到")

    tar_path = f"/tmp/{image_id.replace(':', '_')}.tar.gz"
    if not os.path.exists(tar_path):
        # Build the compressed file once; later requests reuse it.
        with open(tar_path, "wb") as f:
            with gzip.GzipFile(fileobj=f, mode='wb') as gz:
                for chunk in image.save(named=True):
                    gz.write(chunk)

    file_size = os.path.getsize(tar_path)
    range_header = request.headers.get("range")
    if range_header:
        # Parse "bytes=start-end"; an open-ended range ("bytes=N-") means
        # "from N to EOF". Clamp end so an over-long request can't make us
        # claim more bytes than we actually send.
        start_s, _, end_s = range_header.replace("bytes=", "").partition("-")
        start = int(start_s)
        end = min(int(end_s), file_size - 1) if end_s else file_size - 1
        length = end - start + 1

        with open(tar_path, "rb") as f:
            f.seek(start)
            data = f.read(length)

        return Response(
            content=data,
            status_code=206,
            headers={
                "Content-Range": f"bytes {start}-{end}/{file_size}",
                "Accept-Ranges": "bytes",
                "Content-Length": str(length),
                "Content-Type": "application/gzip",
            }
        )

    # Full-file response; advertise range support and close the file
    # handle (the original leaked it via open(...).read()).
    with open(tar_path, "rb") as f:
        data = f.read()
    return Response(
        content=data,
        headers={"Content-Type": "application/gzip", "Accept-Ranges": "bytes"}
    )

# --------------------------container API-----------------------------

@router.get("/getAllContainers")
def list_all_containers():
    """Return every container (running and stopped) as schema rows.

    Returns:
        list[DockerContainerSchema]: one entry per container; empty list
        when no containers exist.
    """
    containers = client.containers.list(all=True)
    if not containers:
        return []
    logger.info(f"[Total {len(containers)} containers found, fetching details...]")

    results = []
    for cont in containers:
        attrs = cont.attrs
        name = cont.name
        cid = cont.id
        image = attrs.get("Config", {}).get("Image", "")
        command = attrs.get("Path", "")
        created = attrs.get("Created", 0)
        status = cont.status
        ports = str(attrs.get("NetworkSettings", {}).get("Ports", ""))

        results.append(
            DockerContainerSchema(
                NAME=name,
                CONTAINER_ID=cid,
                IMAGE=image,
                COMMAND=command,
                CREATED=created,
                STATUS=status,
                PORTS=ports
            )
        )
        logger.info(f"[{name} {cid} {image} {command} {created} {status} {ports}]")
    return results

@router.post("/startContainer/{container_id}")
def start_container(container_id: str):
    """Start a container by id or name.

    Returns:
        dict: {"status", "message"} on success/failure, or
        {"error": "Container not found."} when the id is unknown.
    """
    result = {"status": "failed", "message": ""}
    # containers.get() raises NotFound rather than returning None, so the
    # old `if not container` check was dead code and a missing container
    # produced an unhandled 500.
    try:
        container = client.containers.get(container_id)
    except errors.NotFound:
        return {"error": "Container not found."}
    try:
        container.start()
        result["status"] = "success"
        result["message"] = f"Container {container_id} started."
    except Exception as e:
        logger.error(f"Error starting container {container_id}: {e}")
        result["message"] = str(e)
    return result

@router.post("/stopContainer/{container_id}")
def stop_container(container_id: str):
    """Stop a container by id or name.

    Returns:
        dict: {"status", "message"} on success/failure, or
        {"error": "Container not found."} when the id is unknown.
    """
    result = {"status": "failed", "message": ""}
    # containers.get() raises NotFound rather than returning None, so the
    # old `if not container` check was dead code and a missing container
    # produced an unhandled 500.
    try:
        container = client.containers.get(container_id)
    except errors.NotFound:
        return {"error": "Container not found."}
    try:
        container.stop()
        result["status"] = "success"
        result["message"] = f"Container {container_id} stopped."
    except Exception as e:
        logger.error(f"Error stopping container {container_id}: {e}")
        result["message"] = str(e)
    return result


# HTTP 流式日志接口
# HTTP streaming log endpoint
@router.get("/containers/{container_id}/logs")
def get_container_logs(
    container_id: str,
    tail: int = Query(100, description="显示最近 N 行日志"),
    since: int = Query(None, description="显示某个时间戳之后的日志")
):
    """Stream a container's logs (follow mode) as a plain-text response.

    A missing container is reported inside the stream rather than as a 404,
    because headers are already sent when streaming begins.
    """
    def stream_logs():
        try:
            container = client.containers.get(container_id)
        except errors.NotFound:
            yield f"容器 {container_id} 未找到\n"
            return

        for line in container.logs(stream=True, follow=True, timestamps=True, tail=tail, since=since):
            # errors="replace": container output is not guaranteed to be
            # valid UTF-8; a strict decode would kill the whole stream.
            yield line.decode("utf-8", errors="replace")

    return StreamingResponse(stream_logs(), media_type="text/plain")


# WebSocket 实时日志接口
# WebSocket real-time log endpoint
@router.websocket("/ws/containers/{container_id}/logs")
async def websocket_logs(websocket: WebSocket, container_id: str):
    """Push a container's live logs to a WebSocket client.

    A worker thread consumes the blocking docker log stream and schedules
    sends onto the event loop; the async side polls the connection state
    and signals the thread to stop when the client goes away.
    """
    await websocket.accept()

    try:
        container = client.containers.get(container_id)
    except errors.NotFound:
        await websocket.send_text(f"容器 {container_id} 未找到")
        await websocket.close()
        return

    loop = asyncio.get_running_loop()  # loop to schedule sends onto
    stop_flag = False

    def send_logs():
        # Blocking iteration; runs off the event loop.
        for line in container.logs(stream=True, follow=True, timestamps=True):
            if stop_flag:
                break
            asyncio.run_coroutine_threadsafe(
                websocket.send_text(line.decode("utf-8")), loop
            )

    # daemon=True: with follow=True the log stream never ends on its own and
    # the thread only notices stop_flag after the NEXT line arrives, so a
    # non-daemon thread could keep the process alive indefinitely.
    thread = threading.Thread(target=send_logs, daemon=True)
    thread.start()
    try:
        while True:
            if websocket.client_state.name != "CONNECTED":
                stop_flag = True
                break
            await asyncio.sleep(0.5)
    except Exception as e:
        await websocket.send_text(f"获取日志时出错: {str(e)}")
    finally:
        stop_flag = True
        # close() can raise if the peer already disconnected; the original
        # would then skip the join entirely.
        try:
            await websocket.close()
        except Exception:
            pass
        # Bounded join — an unbounded join() could block forever waiting
        # for a log line that never comes (see daemon note above).
        thread.join(timeout=2)
