import asyncio
import itertools
import json
import os
import random
import shutil
import uuid
from enum import Enum
from functools import cached_property
from subprocess import check_call
from typing import IO, Any, Dict, Iterator, List

import peewee
import requests
from loguru import logger
from playhouse.postgres_ext import JSONField
from pydantic import BaseModel, Field


class TaskStatus(str, Enum):
    """Lifecycle states of a task; str-valued so members compare equal to
    the plain strings stored in the database status column."""

    PENDING = "pending"  # created, not yet acquired by a worker
    RUNNING = "running"  # acquired by a worker
    COMPLETED = "completed"  # finished successfully
    FAILED = "failed"  # failed (also propagated from a failed dependency)
    CANCELLED = "cancelled"  # cancelled (also propagated from a dependency)


class TaskSpec(BaseModel):
    """Description of a task: which repo/commit to run and with what
    parameters. ``task_hash`` is the unique key used throughout the system."""

    commit: str  # git commit to check out
    fetch_url: str  # git URL the repository is cloned from
    task: str  # task identifier (semantics defined by the runner — not used here)
    task_hash: str  # unique key identifying this task
    # Task parameters; a dict value containing "__task_hash" marks a
    # dependency on another task's output (see TaskManager.acquire_ready_task).
    param: Dict[str, Any]
    tag: List[str] = Field(default_factory=list)  # free-form labels


class TaskFile(BaseModel):
    """Metadata record for a stored file (mirrors ``TableFile``)."""

    task_hash: str | None = None  # owning task, if any
    file_id: str  # unique id; also the on-disk file name
    type_: str  # "log", "output" or "file"
    content_type: str  # MIME type of the content


database_proxy = peewee.DatabaseProxy()


class BaseDB(peewee.Model):
    """Base model binding all tables to the lazily-initialized proxy,
    which TaskManager.__init__ points at the real database."""

    class Meta:
        database = database_proxy


class TableTask(BaseDB):
    """Task specifications — one row per submitted task."""

    task_hash = peewee.CharField(unique=True, index=True)  # primary lookup key
    commit = peewee.CharField()  # git commit to check out
    fetch_url = peewee.CharField()  # git repository URL
    task = peewee.CharField()  # task identifier
    param = JSONField()  # task parameters (JSON)


class TableTaskStatus(BaseDB):
    """Status row per task; absence of a row means the task is PENDING
    (see TaskManager.get_task_status). The unique constraint makes
    insertion the atomic "claim" operation in acquire_task."""

    task_hash = peewee.CharField(
        unique=True,
        index=True,
    )
    status = peewee.TextField(default=TaskStatus.PENDING)


class TableFile(BaseDB):
    """File records; the payload lives on disk named after ``file_id``."""

    task_hash = peewee.CharField(unique=False, index=True, null=True)  # owning task, if any
    file_id = peewee.CharField(
        unique=True, index=True, default=lambda: str(uuid.uuid4())
    )
    type_ = peewee.TextField(index=True)  # log, output, file
    content_type = peewee.TextField()  # mime type


class TaskManager:
    """
    Task manager for the task runner.
    All functions must be thread-safe.
    This is used by the server only.

    Task metadata lives in PostgreSQL (via peewee); file payloads are
    stored on disk under ``$STORAGE_HOME/taskrunner_files`` and tracked
    in ``TableFile``.
    """

    def __init__(self):
        # Directory holding all uploaded/generated file payloads.
        self.file_root = os.path.join(
            os.environ.get("STORAGE_HOME", ""), "taskrunner_files"
        )
        os.makedirs(self.file_root, exist_ok=True)
        # If the global proxy is already bound, another TaskManager
        # instance has done the database setup — nothing left to do.
        try:
            if database_proxy._database is not None:
                return
        except AttributeError:
            pass
        # Bootstrap: connect to the admin database and create the
        # 'taskrunner' database if it does not exist yet.
        # NOTE(review): host/credentials are hard-coded — consider env vars.
        admin_db = peewee.PostgresqlDatabase(
            "postgres",
            host="db",
            port=5432,
            user="root",
            password="example",
        )
        admin_db.connect()
        result = admin_db.execute_sql(
            "SELECT 1 FROM pg_database WHERE datname = 'taskrunner'"
        )
        if not result.fetchone():
            admin_db.execute_sql("CREATE DATABASE taskrunner")
        admin_db.close()
        db = peewee.PostgresqlDatabase(
            "taskrunner",
            host="db",
            port=5432,
            user="root",
            password="example",
        )
        database_proxy.initialize(db)

        # Create tables if they do not already exist.
        TableTask.create_table()
        TableTaskStatus.create_table()
        TableFile.create_table()

    def _path_for_file(self, file_id: str) -> str:
        """Return the on-disk path for a stored file id."""
        return os.path.join(self.file_root, file_id)

    def put_task(self, task: TaskSpec):
        """Insert a task; duplicates (same task_hash) are logged and ignored."""
        task_dict = task.model_dump()
        try:
            TableTask.create(**task_dict)
        except peewee.IntegrityError:
            logger.warning(f"Task {task.task_hash} already exists")

    def get_tasks(
        self,
        offset: int = 0,
        limit: int = 100,
        reverse_order: bool = False,
    ) -> List[TaskSpec]:
        """Return a page of tasks, ordered by insertion id."""
        query = (
            TableTask.select()
            .order_by(TableTask.id.desc() if reverse_order else TableTask.id)
            .limit(limit)
            .offset(offset)
            .dicts()
        )
        return [TaskSpec.model_validate(task) for task in query]

    @cached_property
    def _query_pending_tasks(self):
        # Tasks with no status row yet, i.e. never acquired by any worker.
        # Only the query *definition* is cached; it is executed on use.
        return TableTask.select().where(
            ~TableTask.task_hash.in_(TableTaskStatus.select(TableTaskStatus.task_hash))
        )

    def get_pending_tasks(self) -> List[TaskSpec]:
        """Return all tasks that have not been acquired yet."""
        tasks = [
            TaskSpec.model_validate(task) for task in self._query_pending_tasks.dicts()
        ]
        logger.info(f"Found {len(tasks)} ready tasks")
        return tasks

    def _get_by_hash(self, task_hash: str) -> TaskSpec | None:
        """Look up a task spec by hash; None when not found."""
        task = TableTask.get_or_none(TableTask.task_hash == task_hash)
        if task is None:
            return None
        return TaskSpec.model_validate(task.__data__)

    def put_result(self, task_hash: str, file_uuid: str):
        """Attach an output file record to a RUNNING task."""
        status = self.get_task_status(task_hash)
        assert status == TaskStatus.RUNNING, f"Task {task_hash} is not running"
        TableFile.create(
            task_hash=task_hash,
            file_id=file_uuid,
            type_="output",
            content_type="application/pickle",
        )

    def get_result(self, task_hash: str, index: int) -> TaskFile | None:
        """Return the index-th output record of a task, or None."""
        result = (
            TableFile.select()
            .where(TableFile.task_hash == task_hash, TableFile.type_ == "output")
            .order_by(TableFile.id)
            .limit(1)
            .offset(index)
            .first()
        )
        return TaskFile.model_validate(result.__data__) if result else None

    def put_log(self, task_hash: str, log_uuid: str, content_type: str):
        """Attach a log file record to a RUNNING task."""
        status = self.get_task_status(task_hash)
        assert status == TaskStatus.RUNNING, f"Task {task_hash} is not running"
        TableFile.create(
            task_hash=task_hash,
            file_id=log_uuid,
            type_="log",
            content_type=content_type,
        )

    def get_logs(
        self, task_hash: str, limit: int = 100, offset: int = 0
    ) -> Iterator[Dict[str, Any]]:
        """Yield the JSON-decoded content of each log file of a task."""
        results = (
            TableFile.select()
            .where(TableFile.task_hash == task_hash, TableFile.type_ == "log")
            .order_by(TableFile.id)
            .limit(limit)
            .offset(offset)
            .dicts()
        )
        for result in results:
            record = TaskFile.model_validate(result)
            # Close the handle promptly instead of leaking it.
            with self.get_file(record.file_id) as f:
                yield json.load(f)

    def add_file(self, file: TaskFile) -> str:
        """Register a file record and return its file_id."""
        result = TableFile.create(**file.model_dump())
        return result.file_id

    def add_file_chunk(self, file_id: str, payload: bytes, index: int):
        """Write one upload chunk to disk as ``{file_id}.{index}``."""
        chunk_file_path = self._path_for_file(f"{file_id}.{index}")
        with open(chunk_file_path, "wb") as f:
            f.write(payload)
        return True

    def finish_file(self, file_id: str):
        """
        Concatenate chunks ``{file_id}.0``, ``{file_id}.1``, ... into the
        final file and remove the chunks. Chunks must be contiguous from 0.

        Raises:
            Exception: if the assembled file already exists.
        """
        file_path = self._path_for_file(file_id)
        if os.path.exists(file_path):
            raise Exception(f"File {file_id} already exists")
        with open(file_path, "wb") as out:
            for i in itertools.count():
                chunk_file_path = self._path_for_file(f"{file_id}.{i}")
                if not os.path.exists(chunk_file_path):
                    break
                with open(chunk_file_path, "rb") as chunk:
                    shutil.copyfileobj(chunk, out)
                os.remove(chunk_file_path)
        return True

    def get_file(self, file_id: str) -> IO[bytes]:
        """
        Retrieves the file as a file-like object that can be read.
        The caller is responsible for closing it.
        """
        file_path = self._path_for_file(file_id)
        return open(file_path, "rb")

    def get_file_info(self, file_id: str) -> TaskFile:
        """
        Get information about a file by its ID.

        Args:
            file_id: The ID of the file to get information about

        Returns:
            TaskFile object containing file information

        Raises:
            ValueError: if no such file record exists.
        """
        file = TableFile.get_or_none(TableFile.file_id == file_id)
        if file is None:
            raise ValueError(f"File {file_id} not found")
        return TaskFile.model_validate(file.__data__)

    def acquire_task(self, task_hash: str) -> TaskSpec | None:
        """
        acquire a task from the database, and mark it as running
        This function is atomic, and will return None if the task is not found or not pending.
        """
        try:
            # The unique index on task_hash makes this insert the atomic
            # "claim" operation: concurrent losers get an IntegrityError.
            TableTaskStatus.insert(
                task_hash=task_hash, status=TaskStatus.RUNNING
            ).execute()
        except peewee.IntegrityError:
            return None
        return self._get_by_hash(task_hash)

    def clear_task(self, task_hash: str):
        """Remove a task together with its status row and files."""
        logger.info(f"Clearing task {task_hash}")
        self.reset_task(task_hash)
        TableTask.delete().where(TableTask.task_hash == task_hash).execute()

    def reset_task(self, task_hash):
        """
        remove status files, but not the task file itself
        """
        for file in TableFile.select().where(TableFile.task_hash == task_hash):
            try:
                logger.info(f"Deleting file {file.file_id}")
                os.remove(self._path_for_file(file.file_id))
            except Exception as e:
                logger.error(f"Failed to delete file {file.file_id}: {e}")
        TableTaskStatus.delete().where(TableTaskStatus.task_hash == task_hash).execute()
        TableFile.delete().where(TableFile.task_hash == task_hash).execute()

    def acquire_ready_task(self) -> TaskSpec | None:
        """
        return a ready to run task.
        TODO: make a score mechanism to select the task to run.
        """
        list_tasks = list(self._query_pending_tasks.dicts())
        # Shuffle so concurrent workers are less likely to race for the
        # same task.
        random.shuffle(list_tasks)
        for task in list_tasks:
            task = TaskSpec.model_validate(task)
            # A param value shaped like {"__task_hash": ...} marks a
            # dependency on another task's output.
            for k, v in task.param.items():
                if not isinstance(v, dict):
                    continue
                dependency_task = v.get("__task_hash", None)
                logger.info(f"checking dependency_task: {dependency_task}")
                if dependency_task is None:
                    continue
                dependency_task_status = self.get_task_status(dependency_task)
                if dependency_task_status == TaskStatus.COMPLETED:
                    pass  # dependency satisfied; keep checking the rest
                elif dependency_task_status == TaskStatus.RUNNING:
                    break  # dependency still running; try another task
                elif dependency_task_status == TaskStatus.FAILED:
                    # Propagate failure to the dependent task.
                    self.set_task_status(task.task_hash, TaskStatus.FAILED)
                    break
                elif dependency_task_status == TaskStatus.CANCELLED:
                    self.set_task_status(task.task_hash, TaskStatus.CANCELLED)
                    break
                else:
                    break  # PENDING dependency: task cannot run yet
            else:
                # No dependency blocked us — claim this task atomically.
                return self.acquire_task(task.task_hash)
        return None

    def get_task_status(self, task_hash: str) -> TaskStatus:
        """Return the task's status; tasks without a status row are PENDING."""
        result = TableTaskStatus.get_or_none(TableTaskStatus.task_hash == task_hash)
        if result is None:
            return TaskStatus.PENDING
        return result.status

    def set_task_status(self, task_hash: str, status: TaskStatus):
        """Upsert the status row for a task."""
        # NOTE(review): asserts vanish under ``python -O``; kept as-is so
        # callers catching AssertionError are unaffected.
        assert status in TaskStatus.__members__.values(), f"Invalid status: {status}"
        assert isinstance(status, str), f"Status must be a string: {status}"
        TableTaskStatus.insert(task_hash=task_hash, status=status).on_conflict(
            conflict_target=[TableTaskStatus.task_hash],
            update={TableTaskStatus.status: status},
        ).execute()

    def prune(self):
        """
        prune all files that are not in the db
        """
        files = {file.file_id for file in TableFile.select(TableFile.file_id)}
        logger.info(f"Found {len(files)} files in the db")
        logger.info(
            f"Found {len(os.listdir(self.file_root))} files in the file root {self.file_root}"
        )
        # NOTE(review): in-flight chunk files ("{uuid}.{i}") are never in the
        # db and would be pruned too — confirm prune never runs mid-upload.
        for file in os.listdir(self.file_root):
            if file not in files:
                try:
                    logger.info(f"Deleting file {file}")
                    os.remove(self._path_for_file(file))
                except Exception as e:
                    logger.error(f"Failed to delete file {file}: {e}")

    def get_task_files(self, task_hash: str) -> List[TaskFile]:
        """
        Get all files associated with a task, excluding log files
        """
        files = (
            TableFile.select()
            .where((TableFile.task_hash == task_hash) & (TableFile.type_ != "log"))
            .order_by(TableFile.id)
            .dicts()
        )
        return [TaskFile.model_validate(file) for file in files]


class ResponsePutTask(BaseModel):
    """Server response for submitting a task."""

    success: bool
    message: str


class ResponseGetTaskStatus(BaseModel):
    """Server response for a task-status query."""

    success: bool
    status: TaskStatus


class ResponsePostFile(BaseModel):
    """Server response for registering a new file."""

    success: bool


class ResponsePostFileChunk(BaseModel):
    """Server response for uploading one file chunk."""

    success: bool


class ResponsePostFileFinish(BaseModel):
    """Server response for finalizing a chunked upload."""

    success: bool


class ResponseClearTask(BaseModel):
    """Server response for deleting a task."""

    success: bool


class ResponseGetTaskOutput(BaseModel):
    """Server response for fetching a task output record."""

    success: bool
    output: TaskFile | None


class ResponseSetTaskStatus(BaseModel):
    """Server response for setting a task's status."""

    success: bool


class NoMoreFileException(Exception):
    """Raised when a task completed but has no output at the requested index."""

    pass


class DependencyTaskError(Exception):
    """Raised when a task failed/was cancelled before producing the
    awaited output."""

    pass


class ResponseCheckTaskOutput(BaseModel):
    """Server response when polling for a task output."""

    ready: bool  # True when the output record is available
    task_status: TaskStatus
    record: TaskFile | None  # set when ready is True


class ResponseGetLogs(BaseModel):
    """Server response containing decoded JSON log entries."""

    success: bool
    logs: List[Dict[str, Any]]


class ResponseGetTaskFiles(BaseModel):
    """Server response listing a task's non-log files."""

    success: bool
    files: List[TaskFile]


def try_server() -> str:
    """
    Probe the known server URLs and return the first one that answers the
    health check with '"RR_OK"'.

    Returns:
        The base URL of the first reachable server.

    Raises:
        RuntimeError: if no server responds correctly.
    """
    server_list = [
        "http://localhost:8000",
        "https://server.tail13fe1.ts.net/rr_server",
        "http://rr_server:8000",
    ]
    for url in server_list:
        try:
            resp = requests.get(url, timeout=2)
            if resp.text == '"RR_OK"':
                print("Using server", url)
                return url
        except requests.RequestException as e:
            # Log instead of silently swallowing the failure.
            logger.debug(f"Server {url} not reachable: {e}")
    raise RuntimeError("No Server Error")


class ServerHelper:
    """
    This is used by the worker manager to interact with the server.

    Wraps the server's HTTP API and keeps a local download cache under
    ``$STORAGE_HOME/taskrunner_files`` (falling back to ``$HOME``).
    """

    def __init__(self, server_url: str | None = None):
        # Resolution order: explicit argument > env var > probing known hosts.
        if server_url is None:
            server_url = os.environ.get("TASKRUNNER_SERVER_URL", None)
        if server_url is None:
            server_url = try_server()
        self.server_url = server_url
        self._file_cache_dir = os.path.join(
            os.environ.get("STORAGE_HOME", os.environ.get("HOME")),
            "taskrunner_files",
        )
        os.makedirs(self._file_cache_dir, exist_ok=True)

    def put(self, path: str, payload: BaseModel):
        """PUT a pydantic payload to *path* and return the JSON response."""
        # Fix: requests cannot JSON-serialize a BaseModel; dump it first.
        response = requests.put(self.server_url + path, json=payload.model_dump())
        return response.json()

    def _post(self, path, payload: BaseModel | bytes):
        """POST a pydantic model (as JSON) or raw bytes; return the parsed
        JSON response, falling back to the raw text body."""
        req_path = self.server_url + path
        if isinstance(payload, BaseModel):
            response = requests.post(req_path, json=payload.model_dump())
        elif isinstance(payload, bytes):
            response = requests.post(req_path, data=payload)
        else:
            raise ValueError(f"Invalid payload type: {type(payload)}")
        try:
            return response.json()
        except json.JSONDecodeError:
            return response.text

    def add_file(
        self,
        file_path: str,
        file_id: str | None = None,
        task_hash: str | None = None,
        type_: str = "file",
        content_type: str = "application/raw",
    ):
        """
        Upload a local file: register it on the server, stream its content
        in 1 MiB chunks, then finalize it.

        Raises:
            AssertionError: if the local file is missing or any step fails.
        """
        # Check the local file before registering it server-side so a
        # missing file does not leave an orphan record on the server.
        assert os.path.exists(file_path), f"File {file_path} does not exist"
        if file_id is None:
            file_id = str(uuid.uuid4())
        task_file = TaskFile(
            file_id=file_id,
            task_hash=task_hash,
            type_=type_,
            content_type=content_type,
        )
        resp1 = ResponsePostFile.model_validate(self._post("/file", task_file))
        assert resp1.success, f"Failed to add file {file_path}"
        for i, chunk in enumerate(self._read_file_in_chunks(file_path)):
            resp2 = ResponsePostFileChunk.model_validate(
                self._post(f"/file_chunk/{file_id}/{i}", chunk)
            )
            assert resp2.success, f"Failed to add chunk {i} of file {file_path}"
        resp3 = ResponsePostFileFinish.model_validate(
            self._post(f"/file_finish/{file_id}", b"")
        )
        assert resp3.success, f"Failed to finish file {file_path}"

    def _read_file_in_chunks(
        self, file_path: str, chunk_size: int = 1024 * 1024
    ) -> Iterator[bytes]:
        """Yield the file content in chunks of *chunk_size* bytes."""
        with open(file_path, "rb") as f:
            while chunk := f.read(chunk_size):
                yield chunk

    def get_file(self, file_id: str) -> IO[bytes]:
        """Return the server's streaming body for *file_id*; the caller is
        responsible for consuming it."""
        resp = requests.get(self.server_url + f"/file/{file_id}", stream=True)
        assert resp.status_code == 200, f"Failed to get file {file_id}"
        return resp.raw

    def download_file(self, file_id: str, download_path: str | os.PathLike):
        """
        Download *file_id* into the local cache (unless cached) and
        hard-link it to *download_path*.

        Raises:
            FileExistsError: if *download_path* already exists.
        """
        cache_path = os.path.join(self._file_cache_dir, file_id)
        if os.path.exists(download_path):
            raise FileExistsError(f"File {file_id} already exists at {download_path}")
        download_root = os.path.dirname(download_path)
        if not os.path.exists(download_root):
            logger.info(f"Creating directory {download_root}")
            os.makedirs(download_root)
        if not os.path.exists(cache_path):
            with open(cache_path, "wb") as f:
                f_down = self.get_file(file_id)
                shutil.copyfileobj(f_down, f)
        # NOTE(review): os.link requires cache and destination on the same
        # filesystem — confirm, or fall back to shutil.copy.
        os.link(cache_path, download_path)

    def get_logs(
        self, task_hash: str, limit: int = 100, offset: int = 0
    ) -> List[Dict[str, Any]]:
        """Fetch a page of JSON log entries for a task."""
        resp = requests.get(
            self.server_url + f"/get_logs/{task_hash}/{limit}/{offset}"
        ).json()
        return ResponseGetLogs.model_validate(resp).logs

    async def download_task_arg_async(
        self,
        task_hash: str,
        index: int,
        store_path: str,
        check_duration: int = 1,
    ):
        """
        Wait until output *index* of *task_hash* exists, then download it to
        *store_path*. Polls every *check_duration* seconds while the task is
        still running; returns immediately if *store_path* already exists.

        Raises:
            NoMoreFileException: task completed but has no such output.
            DependencyTaskError: task failed/cancelled before producing it.
        """
        if os.path.exists(store_path):
            return
        while True:
            resp = ResponseCheckTaskOutput.model_validate(
                requests.get(
                    self.server_url + f"/check_task_output/{task_hash}/{index}"
                ).json()
            )
            if resp.ready:
                self.download_file(resp.record.file_id, store_path)
                return
            if resp.task_status == TaskStatus.RUNNING:
                await asyncio.sleep(check_duration)
                continue
            if resp.task_status == TaskStatus.COMPLETED:
                raise NoMoreFileException()
            # Fix: the response model has no ``status_code`` attribute —
            # the old message raised AttributeError; report the task status.
            raise DependencyTaskError(
                f"Failed to get task arg {task_hash} {index}, task status: {resp.task_status}"
            )

    def clear_task(self, task_hash: str):
        """Ask the server to delete a task and its files."""
        resp = requests.delete(self.server_url + f"/task/{task_hash}")
        return resp.json()

    def acquire_ready_task(self) -> TaskSpec | None:
        """Claim a ready task from the server; None if none is available."""
        resp = requests.get(self.server_url + "/ready_task/")
        if resp.status_code != 200:
            return None
        return TaskSpec.model_validate(resp.json())

    def set_task_status(self, task_hash: str, status: TaskStatus):
        """Set a task's status on the server."""
        resp = requests.post(
            self.server_url + f"/task_status/{task_hash}/{status.value}",
        )
        return resp.json()

    def get_task_status(self, task_hash: str) -> TaskStatus:
        """Query a task's status from the server."""
        resp = requests.get(self.server_url + f"/task_status/{task_hash}")
        resp = ResponseGetTaskStatus.model_validate(resp.json())
        assert resp.success, f"Failed to get task status {task_hash}"
        return resp.status

    def get_task_spec(self, task_hash: str) -> TaskSpec:
        """Fetch the full task spec from the server."""
        resp = requests.get(self.server_url + f"/task_spec/{task_hash}")
        return TaskSpec.model_validate(resp.json())

    def get_task_files(self, task_hash: str) -> List[TaskFile]:
        """List the task's non-log files via the server."""
        response = requests.get(self.server_url + f"/task_files/{task_hash}")
        return ResponseGetTaskFiles.model_validate(response.json()).files

    def get_file_info(self, file_id: str) -> TaskFile:
        """
        Get information about a file by its ID.

        Args:
            file_id: The ID of the file to get information about

        Returns:
            TaskFile object containing file information

        Raises:
            ValueError: if no such file record exists.
        """
        # NOTE(review): this reads the database table directly instead of
        # the HTTP API, so it only works where the DB is reachable —
        # confirm whether a server endpoint should be used here instead.
        file = TableFile.get_or_none(TableFile.file_id == file_id)
        if file is None:
            raise ValueError(f"File {file_id} not found")
        return TaskFile.model_validate(file.__data__)


def prepare_root(task_spec: TaskSpec, target_dir: str | None = None) -> str:
    """
    Prepare a clean checkout of *task_spec*'s repository at its commit.

    Maintains one shared base clone per repository under
    ``$STORAGE_HOME/git`` (cloned on first use, pulled afterwards), then
    clones from it into *target_dir* (default:
    ``$STORAGE_HOME/git_hash/{task_hash}``), checks out the commit and
    cleans untracked files. Any existing *target_dir* is removed first.

    Returns:
        The path of the prepared working directory.

    Raises:
        subprocess.CalledProcessError: if any git command fails.
    """
    fetch_url = task_spec.fetch_url
    commit = task_spec.commit
    task_hash = task_spec.task_hash
    if target_dir is None:
        target_dir = os.path.join(
            os.environ.get("STORAGE_HOME", ""), "git_hash", task_hash
        )
    # Shared base clone, one per repository name.
    base_dir = os.path.join(
        os.environ.get("STORAGE_HOME", ""), "git", fetch_url.split("/")[-1]
    )
    # Pass argument lists (not ``.split()`` strings) so paths containing
    # spaces are handled correctly; check_call raises on failure, which is
    # more reliable than asserts (stripped under ``python -O``).
    if not os.path.exists(os.path.join(base_dir, ".git")):
        logger.info(f"cloning {fetch_url} to {base_dir}")
        check_call(["git", "clone", fetch_url, base_dir])
        assert os.path.exists(os.path.join(base_dir, ".git"))
    else:
        logger.info("pulling from remote")
        check_call(["git", "pull"], cwd=base_dir)
    if os.path.exists(target_dir):
        shutil.rmtree(target_dir)
    check_call(["git", "clone", base_dir, target_dir])
    check_call(["git", "checkout", commit], cwd=target_dir)
    check_call(["git", "clean", "-fx"], cwd=target_dir)
    return target_dir
