
from datetime import datetime, timedelta
from typing import Union

import orjson
from src import settings

from src.db import task_op
from src.db.table import CrawlerTask
from src.enum.exception import ExceptionEnum
from src.enum.task import CrawlerStatusEnum, CrawlerTypeEnum, ExecResultEnum
from src.model.request import CrawlerRequest, WaybillParams, VoyageLineParams
from src.model.crawler import CrawlerId, CrawlerResult
from src.model import response_ok, response_error, ResponseModel
from src.exception import ResponseException, ApplicationException
from src.util import common_util
from starlette import status

class CrawlerTaskServer:
    """Service layer for crawler tasks: create / run / delete / fetch result."""

    # Build a deterministic crawler id from the task type and its parameters.
    def create_crawler_id(
        self,
        crawler_type: CrawlerTypeEnum,
        crawler_params: Union[WaybillParams, VoyageLineParams],
    ) -> str:
        """Return an MD5 digest uniquely identifying a crawler task.

        The digest is derived from the crawler type plus the stripped,
        type-specific parameter fields, so identical requests dedupe to
        the same task row.
        """
        if crawler_type in (CrawlerTypeEnum.OCEAN_WAYBILL, CrawlerTypeEnum.AIR_WAYBILL):
            crawler_info = (
                f"{crawler_type.value}"
                f"|{crawler_params.waybill_num.strip()}"
                f"|{crawler_params.uni_code.strip()}"
            )
        elif crawler_type == CrawlerTypeEnum.VOYAGE_ROUTE:
            crawler_info = f"{crawler_type.value}|{crawler_params.uni_code.strip()}"
        else:
            # NOTE(review): VESSEL_SCHEDULE (and any future type) falls through
            # here, so every such task hashes the empty string and shares one
            # crawler_id — confirm whether a dedicated branch is intended.
            crawler_info = ""

        return common_util.convert_md5(crawler_info)

    # Create a crawler task (idempotent on crawler_id).
    def create(self, request: CrawlerRequest):
        """Validate the request and insert the task if it does not exist yet.

        Returns an error ResponseModel when validation fails, otherwise an
        OK response carrying the (possibly pre-existing) crawler_id.
        """
        check_result = self.check_crawler_params(request)
        if check_result is not True:
            # check_crawler_params returned an error ResponseModel.
            return check_result

        crawler_id = self.create_crawler_id(
            crawler_type=request.crawler_type,
            crawler_params=request.crawler_params,
        )

        # Only insert when no task with this id exists yet.
        task = task_op.query_by_crawler_id(crawler_id)
        if task is None:
            new_task = CrawlerTask(
                crawler_id=crawler_id,
                crawler_type=request.crawler_type.value,
                crawler_params=request.crawler_params.model_dump_json(),
            )
            task_op.create(task=new_task)
        return response_ok(data=CrawlerId(crawler_id=crawler_id))

    # Execute a crawler task.
    def run(self, crawler_id: str):
        """Reset the task to the EXECUTING state and return its snapshot.

        Raises:
            ApplicationException: when crawler_id is unknown.
            ResponseException: HTTP 429 when the task already succeeded less
                than one hour ago (simple per-task rate limit).
        """
        task = task_op.query_by_crawler_id(crawler_id)
        if task is None:
            raise ApplicationException(ExceptionEnum.CRAWLER_ID_NOT_FOUND_ERROR)
        # BUGFIX: the column stores ExecResultEnum.*.value (see the INIT write
        # below and get_result), so compare against .value, not the member —
        # the original comparison was always False and the limiter never fired.
        # Also guard exec_time against None before datetime arithmetic.
        if (
            task.exec_result == ExecResultEnum.SUCCESS.value
            and task.exec_time is not None
            and datetime.now() - task.exec_time < timedelta(hours=1)
        ):
            raise ResponseException(
                status_code=status.HTTP_429_TOO_MANY_REQUESTS,
                error=ExceptionEnum.CRAWLER_EXEC_RATE_TOO_HIGH,
            )
        # Reset execution bookkeeping before handing the task to the crawler.
        task.crawler_status = CrawlerStatusEnum.EXECUTING.value
        task.crawler_times = 0
        task.exec_time = datetime.now()
        task.reason = None
        task.exec_result = ExecResultEnum.INIT.value
        task_op.update(task=task)

        crawler_params = (
            orjson.loads(task.crawler_params) if task.crawler_params is not None else None
        )
        crawler_result = CrawlerResult(
            crawler_type=task.crawler_type,
            crawler_params=crawler_params,
            crawler_status=task.crawler_status,
            reason=None,
            exec_time=task.exec_time.strftime(settings.STANDARD_DATETIME_FORMATTER),
            exec_result=task.exec_result,
        )
        return response_ok(data=crawler_result)

    # Delete a crawler task.
    def delete(self, crawler_id: str):
        """Remove the task identified by crawler_id."""
        task_op.delete(crawler_id)
        return response_ok()

    # Fetch the crawler result.
    async def get_result(self, crawler_id: str):
        """Return the current state (and data, once finished) of a task.

        Raises:
            ApplicationException: when crawler_id is unknown.
        """
        task = task_op.query_by_crawler_id(crawler_id)
        if task is None:
            raise ApplicationException(ExceptionEnum.CRAWLER_ID_NOT_FOUND_ERROR)

        crawler_params = (
            orjson.loads(task.crawler_params) if task.crawler_params is not None else None
        )
        exec_time = (
            task.exec_time.strftime(settings.STANDARD_DATETIME_FORMATTER)
            if task.exec_time is not None
            else None
        )
        # Result data is only available once the task finished successfully.
        if (
            task.crawler_status == CrawlerStatusEnum.FINISH.value
            and task.exec_result == ExecResultEnum.SUCCESS.value
        ):
            result_data = orjson.loads(common_util.load_text_from_file(task.data_file_path))
        else:
            result_data = None
        crawler_result = CrawlerResult(
            crawler_type=task.crawler_type,
            crawler_status=task.crawler_status,
            crawler_params=crawler_params,
            exec_time=exec_time,
            reason=task.reason,
            exec_result=task.exec_result,
            result_data=result_data,
        )
        return response_ok(data=crawler_result)

    # Helper: wrap a validation failure message in an error response.
    def _params_error(self, message: str) -> ResponseModel:
        return response_error(
            error=ExceptionEnum.PARAMS_VALIDATION_FAILED,
            data=message,
        )

    # Validate crawler parameters.
    def check_crawler_params(self, request: CrawlerRequest) -> bool | ResponseModel:
        """Validate request.crawler_params against request.crawler_type.

        Returns True when valid, otherwise an error ResponseModel describing
        the first violated rule.
        """
        # BUGFIX: the original `not (x == e.value for e in Enum)` tested the
        # truthiness of a generator object (always truthy), so this branch
        # could never fire. Test membership explicitly, against the enum
        # members (crawler_type is an enum member everywhere else in this file).
        if not any(request.crawler_type == member for member in CrawlerTypeEnum):
            return self._params_error("crawler_type is invalid")

        if request.crawler_type in (CrawlerTypeEnum.OCEAN_WAYBILL, CrawlerTypeEnum.AIR_WAYBILL):
            if request.crawler_params is None:
                return self._params_error("crawler_params must not be None")
            if request.crawler_params.uni_code is None or request.crawler_params.waybill_num is None:
                return self._params_error(
                    "crawler_params.uni_code or crawler_params.waybill_num must not be None"
                )
            if request.crawler_params.uni_code.strip() == "" or request.crawler_params.waybill_num.strip() == "":
                return self._params_error(
                    "crawler_params.uni_code or crawler_params.waybill_num must not be empty"
                )
        elif request.crawler_type == CrawlerTypeEnum.VOYAGE_ROUTE:
            if request.crawler_params is None:
                return self._params_error("crawler_params must not be None")
            if request.crawler_params.uni_code is None or request.crawler_params.uni_code.strip() == "":
                # BUGFIX: the original message mentioned waybill_num (copy-paste);
                # only uni_code is checked for VOYAGE_ROUTE.
                return self._params_error("crawler_params.uni_code must not be None or empty")
        elif request.crawler_type == CrawlerTypeEnum.VESSEL_SCHEDULE:
            if request.crawler_params is None:
                return self._params_error("crawler_params must not be None")
            if request.crawler_params.uni_code is None or request.crawler_params.vessel_name is None:
                return self._params_error(
                    "crawler_params.uni_code or crawler_params.vessel_name must not be None"
                )
            if request.crawler_params.uni_code.strip() == "" or request.crawler_params.vessel_name.strip() == "":
                return self._params_error(
                    "crawler_params.uni_code or crawler_params.vessel_name must not be empty"
                )
        return True


