import asyncio
import threading
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from typing import Callable, Dict

from aiohttp import web
from aiohttp.web_request import Request

import snail
from config import AIOHTTP_PARAM_EXECUTOR_NAME, SNAIL_HOST_PORT
from log import SnailLog, SnailLogContext
from rpc import send_dispatch_result, send_heartbeat
from schemas import (
    DispatchJobRequest,
    DispatchJobResult,
    ExecuteResult,
    ExecutorTypeEnum,
    NettyResult,
    StatusEnum,
    StopJobRequest,
)

__all__ = ["job", "ExecutorManager", "run_server", "create_tasks_on_loop"]


class SnailJobError(RuntimeError):
    """Raised for snail-job executor errors (bad registration, invalid jobs)."""


# Strong references to in-flight dispatch tasks. The asyncio event loop keeps
# only weak references to tasks, so a fire-and-forget create_task() result can
# be garbage-collected mid-run unless referenced here (see asyncio docs).
_dispatch_tasks: set = set()


async def handle_dispatch(request: Request):
    """Handle a dispatch-job POST: start execution in the background and ack.

    The actual work runs in a background task so the scheduler gets an
    immediate acknowledgement; the result is reported asynchronously from
    ExecutorManager.execute.

    Args:
        request (Request): aiohttp request whose JSON body is a
            DispatchJobRequest

    Returns:
        web.Response: JSON-serialized NettyResult acknowledgement
    """
    data = await request.json()
    dispatch_job_request = DispatchJobRequest(**data)
    SnailLog.LOCAL.info(f"接收到的任务执行请求: reqId={dispatch_job_request.reqId}")
    manager: ExecutorManager = request.app[AIOHTTP_PARAM_EXECUTOR_NAME]

    # Fire-and-forget, but hold a strong reference until the task completes.
    task = asyncio.create_task(manager.execute(dispatch_job_request))
    _dispatch_tasks.add(task)
    task.add_done_callback(_dispatch_tasks.discard)

    return web.json_response(
        NettyResult(
            status=StatusEnum.STATUS_SUCCESS,
            reqId=dispatch_job_request.reqId,
            data=True,
        ).model_dump(mode="json")
    )


async def handle_stop(request: Request):
    """Handle a stop-job POST: delegate to the manager and acknowledge.

    Args:
        request (Request): aiohttp request whose JSON body is a StopJobRequest

    Returns:
        web.Response: JSON-serialized NettyResult acknowledgement
    """
    payload = await request.json()
    stop_job_request = StopJobRequest(**payload)
    SnailLog.LOCAL.info(f"接收到的任务停止请求: reqId={stop_job_request.reqId}")

    manager: ExecutorManager = request.app[AIOHTTP_PARAM_EXECUTOR_NAME]
    manager.stop(stop_job_request)

    ack = NettyResult(
        status=StatusEnum.STATUS_SUCCESS,
        reqId=stop_job_request.reqId,
        data=True,
    )
    return web.json_response(ack.model_dump(mode="json"))


async def run_server(executor_manager: "ExecutorManager"):
    """Web-server coroutine: serve the dispatch/stop endpoints forever.

    Args:
        executor_manager (ExecutorManager): manager handed to the request
            handlers via the application state
    """
    app = web.Application()

    # Expose the executor manager to the request handlers.
    app[AIOHTTP_PARAM_EXECUTOR_NAME] = executor_manager

    # Endpoint routing.
    for path, handler in (
        ("/job/dispatch/v1", handle_dispatch),
        ("/job/stop/v1", handle_stop),
    ):
        app.router.add_post(path, handler)

    # Bring the aiohttp server up.
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, port=SNAIL_HOST_PORT)
    await site.start()

    # Sleep forever so the server outlives this coroutine's caller;
    # the periodic wake-up keeps the coroutine promptly cancellable.
    while True:
        await asyncio.sleep(1)


def job(executor_name: str):
    """Decorator that tags a callable as a snail-job executor.

    Args:
        executor_name (str): name under which the executor will be registered
    """

    def mark(target):
        # Attach the registration key; ExecutorManager.register reads it.
        target.executor_name = executor_name
        return target

    return mark


class ExecutorManager:
    """Executor manager.

    Holds the registry of ``@job``-decorated executors and one
    ThreadPoolExecutor per running task batch.
    """

    def __init__(self):
        # executorName => callable (@job-decorated function or coroutine)
        self.executors: Dict[str, Callable] = {}
        # taskBatchId => ThreadPoolExecutor running that batch
        self.executor_pools: Dict[int, ThreadPoolExecutor] = {}
        # Guards executor_pools against concurrent execute()/stop() access.
        self.lock = threading.Lock()

    def register(self, executor: Callable):
        """Register an executor.

        Args:
            executor (Callable): function or coroutine decorated with `@job`

        Raises:
            SnailJobError: if `executor` is not callable, is not decorated
                with @job("executorName"), or the name is already registered
        """
        if not callable(executor):
            # Plain string: there is no placeholder to interpolate.
            raise SnailJobError("Parameter `executor` is not callable")

        if not hasattr(executor, "executor_name"):
            raise SnailJobError(
                f"[{executor.__name__}] is not a job, decorate it with @job"
            )

        # Bug fix: the registry is keyed by executor_name, so the duplicate
        # check must use executor_name as well (it previously checked
        # __name__, letting two functions with different names but the same
        # executor_name silently overwrite each other).
        if executor.executor_name in self.executors:
            raise SnailJobError(
                f"Executor name [{executor.executor_name}] already exists"
            )

        self.executors[executor.executor_name] = executor
        SnailLog.LOCAL.info(f"成功注册执行器: {executor.executor_name}")

    @staticmethod
    def execute_wrapper(executor: Callable, args) -> ExecuteResult:
        """Run `executor` in the current (pool) thread, trapping exceptions.

        Returns the executor's ExecuteResult, or a failure result when the
        executor raises.
        """
        # Bind the job identifiers to this thread's log context so remote
        # logs are attributed to the right task.
        SnailLog.set_context(
            SnailLogContext(
                jobId=args.jobId,
                taskId=args.taskId,
                taskBatchId=args.taskBatchId,
            )
        )
        try:
            # TODO: validate args.executorType == ExecutorTypeEnum.PYTHON
            return executor(args.argsStr)
        except Exception as ex:
            SnailLog.REMOTE.exception(str(ex))
            return ExecuteResult.failure("任务执行异常")

    async def execute(self, dispatch_args: DispatchJobRequest):
        """Execute one task batch and report the result to the server.

        Args:
            dispatch_args (DispatchJobRequest): dispatch request; its first
                `args` entry carries executorInfo / taskBatchId / parallelNum
        """
        args = None
        try:
            assert len(dispatch_args.args) > 0, "args 不能为空"
            args = dispatch_args.args[0]
            executor = self.executors.get(args.executorInfo)
            assert executor is not None, f"找不到对应 name={args.executorInfo} 执行器"
            SnailLog.LOCAL.info(f"Executor {args.executorInfo} found.")

            # Create (or reuse) the batch's thread pool under the lock.
            with self.lock:
                pool = self.executor_pools.get(args.taskBatchId)
                if pool is None:
                    pool = ThreadPoolExecutor(
                        max_workers=args.parallelNum,
                        thread_name_prefix=f"snail-job-task-{args.taskBatchId}",
                    )
                    self.executor_pools[args.taskBatchId] = pool

            # Run the blocking executor off the event loop. Using the local
            # `pool` reference avoids a KeyError race with stop() removing
            # the cache entry mid-flight.
            loop = asyncio.get_running_loop()
            result: ExecuteResult = await loop.run_in_executor(
                pool,
                partial(ExecutorManager.execute_wrapper, executor, args),
            )
            dispatch_result = DispatchJobResult.build(
                dispatchJobRequest=dispatch_args,
                executeResult=result,
            )
            send_dispatch_result(dispatch_result)
        except Exception as ex:
            SnailLog.LOCAL.exception(str(ex))
        finally:
            # Always release the batch's pool: the original removed the dict
            # entry without shutdown() (leaking worker threads) and only on
            # the success path (leaking the entry itself on failure).
            if args is not None:
                with self.lock:
                    stale = self.executor_pools.pop(args.taskBatchId, None)
                if stale is not None:
                    stale.shutdown(wait=False)

    def stop(self, stop_request: StopJobRequest):
        """Stop a running task batch by cancelling its thread pool.

        Args:
            stop_request (StopJobRequest): stop request; its first `args`
                entry carries the taskBatchId to stop
        """
        assert len(stop_request.args) > 0, "args 不能为空"
        args = stop_request.args[0]

        # Pop under the lock so execute() cannot observe a half-removed pool.
        with self.lock:
            pool = self.executor_pools.pop(args.taskBatchId, None)
        if pool is not None:
            pool.shutdown(wait=False, cancel_futures=True)


def create_manager_on_loop(loop: asyncio.AbstractEventLoop) -> ExecutorManager:
    """Schedule the web server and heartbeat on `loop`; return the manager.

    The caller registers its executors on the returned manager.

    Args:
        loop (asyncio.AbstractEventLoop): event loop to schedule the tasks on

    Returns:
        ExecutorManager: the manager served by the scheduled web server
    """
    manager = ExecutorManager()
    # Keep strong references to the tasks on the manager: the event loop
    # holds only weak references to tasks, so an unreferenced task may be
    # garbage-collected mid-execution (see asyncio.create_task docs).
    manager._background_tasks = [
        loop.create_task(snail.run_server(manager)),
        loop.create_task(send_heartbeat()),
    ]
    return manager
