import datetime
import math
import bson
import arrow
from hashlib import md5

from app.db.models.tasks import Task
from app.db.models.jobs import Job
from app.db.models.asset_tasks import AssetTasks, get_monitor_urls
from app.db.models.assets import Asset
from app.reports.utils import (
    local_time_to_utc,
    utc_time_to_local,
    request_response_info,
    format_ratio
)
from app.libs.display import (
    TASK_TARGET_STATUS_MAP,
    LEVEL_NUM_STR,
    LEVEL_CN_NUM,
    WARNING_CATEGORY_KEY_LABEL_MAP,
    CONTENT_SUB_CATEGORY_MAPPING,
    ASSET_CHANGE_OP,
    ASSET_CHANGE_TYPE_CN
)
from app.handler.v2_task import parse_warning_id
from app.config.settings import DATETIME_FMT
from app.libs.enums import (
    TaskType,
    ChangeCheckType,
    TASK_TYPE_CONFIG,
    WS_TASK_GROUP_TUPLE,
    AVAILABLE_TASK_GROUP_TUPLE,
    WEAKNESS_TASK_GROUP_TUPLE
)

# Task-type name lists derived from the shared enum tuples; used below for
# per-type counters and for MongoDB ``$in`` queries.
WEAKNESS_TASK_TYPE = list(WEAKNESS_TASK_GROUP_TUPLE)
WEB_SEC_TASK_TYPE = list(WS_TASK_GROUP_TUPLE)
AVAILABLE_TASK_TYPE = list(AVAILABLE_TASK_GROUP_TUPLE)


class ReportDataError(Exception):
    """Base class for all report-data building errors in this module."""


class JobDataError(ReportDataError):
    """Raised when the job id is missing/invalid or the job does not exist."""


class TimeDataError(ReportDataError):
    """Raised when the requested report time window is missing or invalid."""


class OneJobData(object):
    """Report data for a single asset (job): its tasks and their warnings."""

    def __init__(self, job_id, report_type, start_time=None, end_time=None, web_sec_task_query=None):
        """Load the job and prepare lazy per-category task-data containers.

        :param job_id: job ObjectId (string or ObjectId); validated here.
        :param report_type: "range" for an explicit time window; anything else
            falls back to "latest" (see ``check_report_time``).
        :param start_time: local start time, required when report_type=="range".
        :param end_time: local end time, required when report_type=="range".
        :param web_sec_task_query: optional pre-built Mongo query for web
            security tasks; when given, default query building is skipped
            entirely (asset/available queries then stay empty).
        :raises JobDataError: job_id missing/invalid or job not found.
        :raises TimeDataError: bad time arguments (from check_report_time).
        """
        if not job_id:
            raise JobDataError(f"job_id is error")
        try:
            job_id = bson.ObjectId(job_id)
        except bson.errors.BSONError:
            raise JobDataError(f"job_id is error")
        self.job = Job.objects.filter(id=job_id).first()
        if not self.job:
            raise JobDataError(f"job is not exist")
        self.report_type, self.start_time, self.end_time = self.check_report_time(report_type, start_time, end_time)

        # Mongo query per monitoring category; built lazily by _task_query
        # unless the caller supplied a ready-made web_sec query.
        self.task_query_map = {"available": {}, "asset": {}, "web_sec": {}}
        if web_sec_task_query:
            self.task_query_map["web_sec"] = web_sec_task_query
        else:
            self._task_query()

        # Web security monitoring data (filled by _get_ws_task_source_data)
        self.ws_task_data = {"is_monitor": False, "init": False,
                             "task_type_count": {"total": 0, **{t: 0 for t in WEB_SEC_TASK_TYPE}}}
        # Asset change monitoring data
        self.asset_task_data = {"is_monitor": False, "init": False, "task_type_count": {"total": 0, "asset": 0}}
        # Availability monitoring data
        self.available_task_data = {"is_monitor": False, "init": False,
                                    "task_type_count": {"total": 0, **{t: 0 for t in AVAILABLE_TASK_TYPE}}}
        # All task end times seen; used by get_export_time for "latest" reports
        self.end_time_set = set()
        # Per-status flags for tasks that ended abnormally
        self.has_error_task = {k: {"exist": False, "msg": msg} for k, msg in TASK_TARGET_STATUS_MAP.items()}

    @staticmethod
    def check_report_time(report_type, start_time, end_time):
        """Validate and normalize the report time window.

        For ``report_type == "range"`` both local times are required; they are
        converted to UTC and must be ordered.  Any other report_type falls
        back to "latest", with start and end both set to the current UTC time.

        :returns: (report_type, start_time, end_time) — UTC datetimes.
        :raises TimeDataError: missing/unparsable times or start > end.
        """
        if report_type == "range" and all([start_time, end_time]):
            try:
                start_time = local_time_to_utc(start_time)
            except arrow.parser.ParserError:
                raise TimeDataError("start_time is error")
            try:
                end_time = local_time_to_utc(end_time)
            except arrow.parser.ParserError:
                raise TimeDataError("end_time is error")
            if start_time > end_time:
                raise TimeDataError("start_time > end_time")
        elif report_type == "range" and not all([start_time, end_time]):
            raise TimeDataError("report_type==range, start_time or end_time not exist")
        else:
            report_type = "latest"
            # FIX: take a single timestamp so start and end are exactly equal;
            # two separate utcnow() calls differed by microseconds.
            start_time = end_time = arrow.utcnow().datetime
        return report_type, start_time, end_time

    @staticmethod
    def format_cert_info(cert_info):
        if not cert_info:
            return {}
        if not_valid_before := cert_info.get("not_valid_before"):
            cert_info["not_valid_before"] = arrow.get(not_valid_before).datetime.strftime(DATETIME_FMT)
        if not_valid_after := cert_info.get("not_valid_after"):
            cert_info["not_valid_after"] = arrow.get(not_valid_after).datetime.strftime(DATETIME_FMT)
        return cert_info

    def _task_query(self):
        """Build the Mongo task query for each monitoring category.

        "range" reports select completed tasks whose endTime falls inside the
        window; "latest" reports select only the most recent result ids that
        the Job / Asset / AssetTasks documents reference.
        """
        if self.report_type == "range":
            base_query = {
                "endTime": {"$gte": self.start_time, "$lte": self.end_time},
                "jobId": self.job.id,
                "status": "completed"
            }
            # Availability: only scheduled (periodic) runs count.
            self.task_query_map["available"].update(
                {"triggerType": "schedule", "taskType": {"$in": AVAILABLE_TASK_TYPE}, **base_query})
            self.task_query_map["asset"].update({"taskType": "asset", **base_query})
            self.task_query_map["web_sec"].update({"taskType": {"$in": WEB_SEC_TASK_TYPE}, **base_query})
        else:
            # Latest asset-change result, referenced from the Asset document.
            if (asset := Asset.objects.filter(jobId=self.job.id).only("resultId").first()) and asset.resultId:
                self.task_query_map["asset"].update({"_id": asset.resultId})

            # Latest web-security result per task type, stored on the Job as
            # "<taskType>ResultId" fields.
            web_sec_result_ids = []
            for t in WEB_SEC_TASK_TYPE:
                if _ := getattr(self.job, f"{t}ResultId"):
                    web_sec_result_ids.append(_)
            self.task_query_map["web_sec"].update({"_id": {"$in": web_sec_result_ids}})

            # Latest availability results: one http and one ping result id per
            # scheduled asset task.
            http_result_ids = [a.httpResultId for a in AssetTasks.objects.filter(
                jobId=self.job.id, triggerType="schedule", taskGroupType="http"
            ).only("httpResultId") if a.httpResultId]
            ping_result_ids = [a.pingResultId for a in AssetTasks.objects.filter(
                jobId=self.job.id, triggerType="schedule", taskGroupType="ping"
            ).only("pingResultId") if a.pingResultId]
            self.task_query_map["available"].update(
                {"_id": {"$in": ping_result_ids + http_result_ids}, "triggerType": "schedule"})

    @staticmethod
    def get_monitor_url(task_session_ids):
        """Fetch the monitored urls for the given task sessions, de-duplicated.

        Queries in chunks of 2000 ids to keep each MongoDB query small.

        FIX: the single-query (<=2000 ids) path previously returned the raw
        result while the chunked path returned ``list(set(...))`` — callers
        counting ``len()`` of the result saw inconsistent numbers; both paths
        now de-duplicate.
        """
        chunk_size = 2000
        if len(task_session_ids) <= chunk_size:
            return list(set(get_monitor_urls(task_session_id=task_session_ids)))
        urls = set()
        for start in range(0, len(task_session_ids), chunk_size):
            chunk = task_session_ids[start: start + chunk_size]
            urls.update(get_monitor_urls(task_session_id=chunk))
        return list(urls)

    @staticmethod
    def _format_available_detail(detail, warnings):
        """格式化可用性任务详情， 方便向excel， word 中写入"""
        if not warnings:
            return ""
        warning_map = {w["id"]: w["title"] for w in warnings}
        result = {}
        for d in detail:
            warnings_ids = d.get("warnings", [])
            if not warnings:
                continue
            network_display_name = d.get("networkDisplayName")
            area = d.get("area")
            for wid in warnings_ids:
                title = warning_map.get(wid)
                result.setdefault(f"{network_display_name}{title}",
                                  {"network": network_display_name, "area": [], "title": title})
                result[f"{network_display_name}{title}"]["area"].append(area)
        detail = [f"{v['network']}: {'、'.join(v['area'])}{v['title']}" for _, v in result.items()]
        return "\n".join(sorted(detail))

    @staticmethod
    def _get_available_key(detail_str, job_id, target):
        """可用性异常详情、资产ID、监测目标(task.target)完全同时，为同一个异常"""
        return md5(f"{detail_str}{job_id}{target}".encode()).hexdigest()

    @staticmethod
    def _format_asset_detail(warnings):
        """Format asset-change warnings into display text per change type.

        :returns: (result, change_op_maps) where result maps "subdomain"/"port"
            to a newline-joined description (or stays [] when nothing changed)
            and change_op_maps holds the raw affected values per add/remove op.
        """
        change_op_maps = {
            "subdomain": {"add": [], "remove": []},
            "port": {"add": [], "remove": []},
        }
        for warning in warnings:
            category = warning["category"]
            if category not in change_op_maps:
                continue
            affects = warning["affects"]
            # Ports arrive as numbers; normalize them to plain strings.
            if isinstance(affects, (int, float)):
                affects = str(int(affects))
            change_op_maps[category][warning["detail"]["op"]].append(affects)

        result = {"subdomain": [], "port": []}
        for change_type, op_maps in change_op_maps.items():
            for op, affects in op_maps.items():
                if not affects:
                    continue
                change_type_cn = WARNING_CATEGORY_KEY_LABEL_MAP['asset'][change_type]
                op_cn = ASSET_CHANGE_OP[change_type][op]
                result[change_type].append(f"{change_type_cn}{'、'.join(affects)}{op_cn}")
        for change_type in ("subdomain", "port"):
            if result[change_type]:
                result[change_type] = "\n".join(result[change_type])
        return result, change_op_maps

    def _get_ws_task_source_data(self):
        """
        Collect web security monitoring task data into ``self.ws_task_data``.

        Runs at most once (guarded by the "init" flag).  Aggregates per task
        type: execution and scan counts, de-duplicated warnings (keyed by
        warning id, with first-found time and found count), per-day warning
        level counts, abnormal target statuses and ssl certificate info.
        """
        if self.ws_task_data.get("init"):
            return
        self.ws_task_data.update({"init": True})
        if not (task_query := self.task_query_map.get("web_sec", {})):
            return
        fields = ("id", "startTime", "endTime", "jobId", "target", "taskType", "taskSessionId", "result.warnings",
                  "result.targetStatus", "result.addition.cert_info", "result.addition.statistics.scan_count")
        if not (tasks := Task.objects.find(task_query).only(*fields).order_by("endTime")):
            return
        # Executions per task type
        task_type_count = {t: 0 for t in WEB_SEC_TASK_TYPE}
        # Actual scan count per task type
        ws_scan_count = {t: 0 for t in WEB_SEC_TASK_TYPE}
        # All discovered warnings, de-duplicated per task type
        task_type_warnings = {t: {} for t in WEB_SEC_TASK_TYPE}
        # Session ids, used below to count monitored urls
        task_session_ids = []
        # Warning level counts grouped by local date
        date_warnings = {}
        # Whether any task ended with an abnormal target status
        task_has_error = False
        # Certificate info per ssl warning id
        warning_cert_info_map = {}
        # Error message per task type for abnormal tasks
        task_type_error_msg = {t: "" for t in WEB_SEC_TASK_TYPE}
        # Latest task end time
        last_time = None
        for task in tasks:
            task_type = task.taskType
            scan_count = task.result.addition.get("statistics", {}).get("scan_count", 0) or 1
            ws_scan_count[task_type] += scan_count
            task_type_count[task_type] += 1
            task_session_ids.append(task.taskSessionId)
            # BUG FIX: this tracks the *latest* end time, but the old code
            # compared with "<" — and with tasks ordered by endTime ascending
            # it always kept the earliest one instead.
            if not last_time or task.endTime > last_time:
                last_time = task.endTime
            self.end_time_set.add(task.endTime)
            if (target_status := task.result.targetStatus) and (target_status.status != "good"):
                task_has_error = True
                self.has_error_task[target_status.status]["exist"] = True
                task_type_error_msg[task_type] = TASK_TARGET_STATUS_MAP.get(target_status.status, "")
            for warning in task.result.warnings:
                warning.id = parse_warning_id(warning=warning, task_obj=task)
                w_dict = warning.to_dict()
                if w_dict["category"] == "certificate" and (cert_info := task.result.addition.get("cert_info", {})):
                    warning_cert_info_map.update({w_dict["id"]: self.format_cert_info(cert_info)})
                # Clamp low severities up to 2 for display purposes
                if w_dict["level"] <= 2:
                    w_dict["level"] = 2
                w_dict.update({
                    "foundAt": task.endTime,
                    "taskType": task.taskType
                })
                # Earliest discovery time of this warning id
                first_fount_at = task_type_warnings[task_type].get(w_dict["id"], {}).get("firstFoundAt") or w_dict["foundAt"]
                w_dict.update({"firstFoundAt": first_fount_at})
                # Number of times this warning id was seen
                found_count = task_type_warnings[task_type].get(w_dict["id"], {}).get("foundCount", 0)
                w_dict.update({"foundCount": found_count + 1})
                task_type_warnings[task_type].update({w_dict["id"]: w_dict})
                # Warning level counts grouped by local date
                date_str = utc_time_to_local(task.endTime, is_format=True, fmt="%Y-%m-%d")
                date_warnings.setdefault(date_str, {}).setdefault(w_dict["level"], 0)
                date_warnings[date_str][w_dict["level"]] += 1
        task_type_count.update({"total": sum(task_type_count.values())})
        ws_scan_count.update({"total": sum(ws_scan_count.values())})
        url_count = len(self.get_monitor_url(task_session_ids))
        self.ws_task_data.update({
                "is_monitor": True,
                "task_has_error": task_has_error,
                "url_count": url_count,
                "scan_count": ws_scan_count,
                "task_type_count": task_type_count,
                "task_type_warnings": task_type_warnings,
                "date_warnings": date_warnings,
                "warning_cert_info_map": warning_cert_info_map,
                "task_type_error_msg": task_type_error_msg,
                "last_time": last_time
            })

    def _get_available_task_source_data(self):
        """
        Collect availability monitoring task data into ``self.available_task_data``.

        Runs at most once (guarded by the "init" flag).  Groups results by
        periodic task id (response times, availability ratios, per-date
        ratios) and collects de-duplicated availability warnings per type.
        """
        if self.available_task_data.get("init"):
            return
        self.available_task_data.update({"init": True})
        if not (task_query := self.task_query_map.get("available", {})):
            return
        fields = ("id", "startTime", "endTime", "jobId", "target", "taskType", "taskSessionId", "result.warnings",
                  "result.addition.respAvg", "taskId", "name", "result.addition.nodeTotalCount",
                  "result.addition.detail", "result.addition.nodeAbnormalCount")
        if not (tasks := Task.objects.find(task_query).only(*fields).order_by("endTime")):
            return
        task_type_count = {t: 0 for t in AVAILABLE_TASK_TYPE}
        # Whether the asset had any availability problem
        has_error = False
        # Monitor targets that had availability problems
        has_error_targets = {t: set() for t in AVAILABLE_TASK_TYPE}
        # Number of runs that had availability problems
        has_error_count = {t: 0 for t in AVAILABLE_TASK_TYPE}
        # Results grouped by periodic task id
        task_id_map = dict()
        # All discovered warnings
        task_type_warnings = {t: {} for t in AVAILABLE_TASK_TYPE}
        for task in tasks:
            node_total_count = task.result.addition.get("nodeTotalCount")
            # BUG FIX: default the abnormal count to 0 — a missing
            # "nodeAbnormalCount" made the ratio below raise TypeError
            # (None / int).
            node_abnormal_count = task.result.addition.get("nodeAbnormalCount") or 0
            if not node_total_count:
                continue
            task_type = task.taskType
            task_type_count[task_type] += 1
            self.end_time_set.add(task.endTime)
            # Availability ratio for this run
            ratio = format_ratio(value=(1 - node_abnormal_count / node_total_count))
            # Average response time for this run
            resp_avg = task.result.addition.get("respAvg")
            task_id_map.setdefault(task.taskId, {
                "name": task.name or self.job.note,
                "target": task.target,
                "task_type": task.taskType,
                "source_ip": self.job.sourceIp or "",
                "count": 0,
                "is_warning": False,  # whether any run raised a warning
                "resp_avg_list": [],  # response times; averaged later
                "availability_ratio_list": [],  # ratios; averaged later
                "date_ratio": {}  # ratios per date, e.g. {"2023-12-14": [0.72, 0.81]}
            })
            task_id_map[task.taskId]["resp_avg_list"].append(resp_avg)
            task_id_map[task.taskId]["availability_ratio_list"].append(ratio)
            task_id_map[task.taskId]["count"] += 1
            date_str = utc_time_to_local(task["endTime"], is_format=True, fmt="%Y-%m-%d")
            task_id_map[task.taskId]["date_ratio"].setdefault(date_str, [])
            task_id_map[task.taskId]["date_ratio"][date_str].append(ratio)

            detail = task.result.addition.get("detail", [])
            warnings = task.result.warnings
            if warnings:
                has_error = True
                has_error_targets[task_type].add(task.target)
                has_error_count[task_type] += 1
                task_id_map[task.taskId]["is_warning"] = True
                detail_str = self._format_available_detail(detail=detail, warnings=warnings)
                warning_id = self._get_available_key(detail_str, task.jobId, task.target)
                task_type_warnings[task_type].setdefault(warning_id, {})
                # NOTE: "fundAt" (sic) is the key consumers read; keep the typo.
                task_type_warnings[task_type][warning_id].update({
                    "taskType": task.taskType,
                    "target": task.target,
                    "fundAt": task.endTime,
                    "detail": detail_str
                })
        task_type_count.update({"total": sum(task_type_count.values())})
        has_error_count.update({"total": sum(has_error_count.values())})
        self.available_task_data.update({
                "is_monitor": True,
                "has_error": has_error,
                "task_type_count": task_type_count,
                "task_type_warnings": task_type_warnings,
                "has_error_targets": has_error_targets,
                "has_error_count": has_error_count,
                "task_id_map": task_id_map
            })

    def _get_asset_task_source_data(self):
        """
        Collect asset-change monitoring task data into ``self.asset_task_data``.

        Runs at most once (guarded by the "init" flag).  Counts monitoring
        runs, collects formatted change details and the raw add/remove values
        per change type (subdomain / port).
        """
        if self.asset_task_data.get("init"):
            return
        self.asset_task_data.update({"init": True})
        if not (task_query := self.task_query_map.get("asset", {})):
            return
        fields = ("id", "startTime", "endTime", "jobId", "target", "taskType", "taskSessionId", "result.warnings")
        if not (tasks := Task.objects.find(task_query).only(*fields).order_by("endTime")):
            return
        # Number of monitoring runs per task type (asset only here)
        task_type_count = {"asset": 0}
        # Number of runs that detected changes
        has_changed_count = 0
        # Raw changed values per change type and operation
        change_type_map = {
            "subdomain": {"add": [], "remove": []},
            "port": {"add": [], "remove": []},
        }
        # Formatted change details; note the "fundAt" (sic) key is what
        # consumers read downstream.
        changed_list = []
        for task in tasks:
            task_type_count[task.taskType] += 1
            self.end_time_set.add(task.endTime)
            if warnings := task.result.warnings:
                has_changed_count += 1
                details, change_op_maps = self._format_asset_detail(warnings)
                for change_type, detail in details.items():
                    if not detail:
                        continue
                    changed_list.append({"change_type": change_type, "fundAt": task.endTime, "detail": detail})
                for change_type, op_maps in change_op_maps.items():
                    for op, affects in op_maps.items():
                        change_type_map[change_type][op].extend([{"affect": a, "time": task.endTime} for a in affects])
        task_type_count.update({"total": sum(task_type_count.values())})
        self.asset_task_data.update({
            "is_monitor": True,
            "task_type_count": task_type_count,
            "changed_list": changed_list,
            "has_changed_count": has_changed_count,
            "change_type_map": change_type_map
        })

    def get_export_time(self):
        """任务最早结束时间、最晚结束时间"""
        if self.report_type == "range":
            return utc_time_to_local(self.start_time), utc_time_to_local(self.end_time)
        elif self.end_time_set:
            return utc_time_to_local(min(self.end_time_set)), utc_time_to_local(max(self.end_time_set))
        return datetime.datetime.now(), datetime.datetime.now()

    def initialize_task_data(self):
        """Load all three task-data sources (web security, availability, asset-change)."""
        for load_source in (
            self._get_ws_task_source_data,
            self._get_available_task_source_data,
            self._get_asset_task_source_data,
        ):
            load_source()

    @staticmethod
    def get_job_level_by_warnings(warnings):
        """Map the highest warning level to its Chinese display name.

        :param warnings: warning dicts carrying a numeric "level".
        :returns: Chinese name of the maximum level, or None for no warnings.
        """
        if not warnings:
            return
        # The per-level counter the old code built here was never read;
        # only the maximum level matters.
        max_level = max(warning["level"] for warning in warnings)
        return LEVEL_NUM_STR[max_level]["cn"]

    def get_job_level(self):
        """Compute the asset threat level from web-security warnings.

        :returns: (level_cn, message) — message is non-empty only when the
            level cannot be determined.
        """
        task_type_warnings = self.ws_task_data.get("task_type_warnings", {})
        if not task_type_warnings:
            # Data may not be loaded yet; load lazily and re-read.
            self._get_ws_task_source_data()
            task_type_warnings = self.ws_task_data.get("task_type_warnings", {})
        if not self.ws_task_data.get("is_monitor"):
            return "未知", "没有执行web安全监测"
        all_warnings = []
        for warning_dict in task_type_warnings.values():
            all_warnings.extend(warning_dict.values())
        if all_warnings:
            return self.get_job_level_by_warnings(all_warnings), ""
        if self.ws_task_data.get("task_has_error"):
            error_msgs = [item["msg"] for item in self.has_error_task.values() if item["exist"]]
            return "未知", error_msgs[0]
        return "安全", ""

    @staticmethod
    def ssl_vul_warning_detail(task_type, detail_map, warning):
        """Accumulate one warning into ``detail_map``, keyed by warning title.

        :param task_type: task type used to translate the category label.
        :param detail_map: mutable accumulator {title: detail dict}.
        :param warning: de-duplicated warning dict.
        """
        level = warning["level"]
        raw_category = warning["category"]
        # BUG FIX: look up the warning's own category key; the old code asked
        # the label map for the literal string "category" (compare the correct
        # lookup in ssl_vul_title_count).
        category = WARNING_CATEGORY_KEY_LABEL_MAP.get(task_type, {}).get(raw_category) or raw_category
        title = warning["title"]
        detail = {
            "title": title,
            "category": category,
            "level_cn": LEVEL_NUM_STR[level]["cn"],
            "level": level,
            "cnvd": warning["cnvd"][0] if warning.get("cnvd") else "",
            "description": warning.get("description") or "",
            "impact": warning.get("impact") or "",
            "recommendation": warning.get("recommendation") or "",
            "reference": "\n".join(warning.get("reference")) if warning.get("reference") else "",
            "first_time": [],
            "last_time": [],
            "fount_count": [],
            "targets": []
        }
        # Compare against the raw category: the mapped value above is a
        # display label, not the "certificate" key.
        if task_type == "ssl" and raw_category == "certificate":
            detail.update({"cert_info": warning.get("cert_info")})
        detail_map.setdefault(title, detail)
        detail_map[title]["first_time"].append(warning.get("firstFoundAt"))
        detail_map[title]["last_time"].append(warning.get("foundAt"))
        detail_map[title]["fount_count"].append(warning.get("foundCount"))

        traffics = warning.get("traffics", [])
        if traffics:
            request, response = request_response_info(
                traffics[0].get("request", {}),
                traffics[0].get("response", {})
            )
        else:
            request, response = "", ""
        detail_map[title]["targets"].append({
            "affects": warning.get("affects"),
            "payload": warning.get("payload") or "",
            "detail": warning.get("detailText"),
            "request": request or "",
            "response": response or "",
        })

    @staticmethod
    def ssl_vul_title_count(task_type, title_count_map, warning):
        """Count how many times each warning title was seen.

        New titles get a fresh entry (the "ratio" field is filled in later by
        the exporter); existing entries just get their count bumped.
        """
        level = warning["level"]
        raw_category = warning["category"]
        label = WARNING_CATEGORY_KEY_LABEL_MAP.get(task_type, {}).get(raw_category)
        title = warning["title"]
        fresh_entry = {
            "title": title,
            "category": label or raw_category,
            "level": level,
            "level_cn": LEVEL_NUM_STR[level]["cn"],
            "count": 0,
            "ratio": "0.0%"
        }
        title_count_map.setdefault(title, fresh_entry)
        title_count_map[title]["count"] += 1

    @staticmethod
    def format_ssl_vul_warning_detail(detail_map):
        """Resolve first/last found times and total found count per detail.

        :returns: details as a list, sorted by level descending.
        """
        detail_list = []
        for detail in detail_map.values():
            if not detail:
                continue
            first_times = detail.pop("first_time")
            last_times = detail.pop("last_time")
            found_counts = detail.pop("fount_count")
            detail.update({
                "first_time": utc_time_to_local(min(first_times), is_format=True),
                "last_time": utc_time_to_local(max(last_times), is_format=True),
                "fount_count": sum(found_counts),
                "targets_count": len(detail.get("targets"))
            })
            detail_list.append(detail)

        detail_list.sort(key=lambda x: x["level"], reverse=True)
        return detail_list

    @staticmethod
    def format_content_sec_warning_detail(detail_map):
        """Resolve first/last found times and total found count per detail,
        updating ``detail_map`` in place and returning it."""
        for detail in detail_map.values():
            if not detail:
                continue
            first_times = detail.pop("first_time")
            last_times = detail.pop("last_time")
            found_counts = detail.pop("fount_count")
            detail.update({
                "first_time": utc_time_to_local(min(first_times), is_format=True),
                "last_time": utc_time_to_local(max(last_times), is_format=True),
                "fount_count": sum(found_counts),
                "targets_count": len(detail.get("targets"))
            })
        return detail_map

    @staticmethod
    def format_change_check_change(change, change_type):
        """Format one change-check record for display.

        :param change: change payload with optional "baseline"/"current" dicts.
        :param change_type: one of the ChangeCheckType values.
        :returns: dict with status (新增/删除/变更), url, matched words and the
            baseline/current status-code / size(KB) / md5 triples.
        """
        baseline = change.get("baseline") or {}
        current = change.get("current") or {}
        res = {
            "status": "变更",
            "change_type": change_type
        }
        if change_type == ChangeCheckType.resource.value:
            # FIX: use .get() consistently; the old code indexed
            # change["baseline"]["is_existing"] / change["current"]["is_existing"]
            # directly and crashed when either block was missing, even though
            # the code below already treats both as optional.
            if not baseline.get("is_existing") and current.get("is_existing"):
                res["status"] = "新增"
            elif baseline.get("is_existing") and not current.get("is_existing"):
                res["status"] = "删除"
        res["url"] = change.get("url", "")
        res["words"] = "、".join(change.get("words", [])) or "暂无数据"
        res.update({
            "baseline": {
                "status_code": int(baseline.get("status_code", 0)) or "",
                "size": (baseline.get("size") or 0) / 1000,
                "md5": baseline.get("md5", "") or "--",
            },
            "current": {
                "status_code": int(current.get("status_code", 0)) or "",
                "size": (current.get("size") or 0) / 1000,
                "md5": current.get("md5", "") or "--",
            },
        })
        return res

    @staticmethod
    def format_change_check_detail(warning):
        """Format a change-check warning for display.

        Splits ``warning["detail"]`` per change-check type: "resource" entries
        carry a list of changes; every other type carries a single change
        whose ratio is rendered as a percentage via ``format_ratio``.

        :returns: dict with level info, affected target, joined Chinese
            category names and the per-type formatted details.
        """
        res = {
            "level": warning["level"],
            "level_cn": warning.get("levelName") or LEVEL_NUM_STR.get(warning["level"], {}).get("cn", ""),
            "affects": warning["affects"],
        }
        detail = {}
        category_cn = []
        for k, v in warning["detail"].items():
            category_cn.append(WARNING_CATEGORY_KEY_LABEL_MAP[TaskType.change_check.value][k])
            if k == ChangeCheckType.resource.value:
                # Resource checks report a list of changed resources.
                detail.update({
                    k: {
                        "change_ratio": v["change_ratio"],
                        "changes": [OneJobData.format_change_check_change(_, k) for _ in v["changes"]]
                    }
                })
            else:
                # Other checks report a single change with a ratio.
                detail.update({
                    k: {
                        "change_ratio": format_ratio(value=v["change_ratio"], is_100=True),
                        "change": OneJobData.format_change_check_change(v.get("change", {}), k)
                    }
                })
        res.update({"category_cn": "、".join(category_cn), **detail})
        return res

    @staticmethod
    def get_content_risk_url_data(category, warning):
        """Summarize one content-violation warning for a single risk url."""
        detail = warning.get("detail", {})
        count = 0
        msg = []
        for sub_type, risks in detail.items():
            count += len(risks)
            label = CONTENT_SUB_CATEGORY_MAPPING[category][int(float(sub_type))]
            msg.append(f"{label}（{'、'.join(risks)}）")
        return {"count": count, "msg": msg, "affects": warning["affects"]}

    @staticmethod
    def get_security_event_risk_url_data(category, warning):
        if category == "black_links":
            black_links = warning.get("detail", {}).get("links", [])
            return {"count": len(black_links), "black_links": black_links, "affects": warning["affects"]}
        elif category == "broken_links":
            broken_links = warning.get("detail", [])
            return {"count": len(broken_links), "broken_links": broken_links, "affects": warning["affects"]}
        elif "foreign_links" in category:
            count = 0
            details = []
            for risk_url, item in warning.get("detail", {}).items():
                count += 1
                tmp = []
                for t, _d in item.items():
                    if t == "icp":
                        for d in _d:
                            tmp.append(f"{d.get('risk_domain', '')} 未备案；")
                    elif t == "keyword":
                        for d in _d:
                            tmp.append(f"违规内容 {d.get('risk_rule', '')}；")
                    elif t == "domain":
                        for d in _d:
                            tmp.append(f"风险域名 {d.get('risk_domain', '')}；")
                details.append({"risk_url": risk_url, "detail": tmp})
            return {"count": count, "details": details, "affects": warning["affects"]}
        elif category == "malscan":
            return {"details": warning.get("detail"), "affects": warning["affects"]}
        elif category == "cryjack":
            return {"details": warning.get("detail"), "affects": warning["affects"]}

    @staticmethod
    def get_content_sec_detail(detail_map, warning, task_type):
        """Accumulate one content / security-event warning into ``detail_map``.

        Only "content" and "securityEvent" task types are handled; foreign
        link warnings are bucketed per level under "foreign_links_<level>".
        """
        if task_type not in ["content", "securityEvent"]:
            return
        category = warning["category"]
        level = warning["level"]
        new_entry = {
            "title": warning.get("title"),
            "category": category,
            "level_cn": LEVEL_NUM_STR[level]["cn"],
            "level": level,
            "first_time": [],
            "last_time": [],
            "fount_count": [],
            "targets": []
        }
        if category == "foreign_links":
            category = f"foreign_links_{level}"
        # Matches the original falsy check: an absent OR falsy entry is
        # replaced by a fresh one.
        if not detail_map.get(category):
            detail_map[category] = new_entry
        entry = detail_map[category]
        entry["first_time"].append(warning.get("firstFoundAt"))
        entry["last_time"].append(warning.get("foundAt"))
        entry["fount_count"].append(warning.get("foundCount"))
        if task_type == "content":
            entry["targets"].append(OneJobData.get_content_risk_url_data(category, warning))
        else:
            entry["targets"].append(OneJobData.get_security_event_risk_url_data(category, warning))

    @staticmethod
    def get_available_task_detail(task_id_map):
        """Turn grouped availability results into sorted http/ping task lists.

        Computes, per periodic task: the average response time, the average
        availability ratio and the per-date ratio series.  Both lists are
        sorted by availability ratio ascending (worst first).

        :param task_id_map: output of ``_get_available_task_source_data``.
        :returns: (http_task_list, ping_task_list)
        """
        # HTTP tasks
        http_task_list = []
        # Ping tasks
        ping_task_list = []
        for task_id, item in task_id_map.items():
            # NOTE(review): resp_avg_list may contain None respAvg values
            # from upstream, which would make sum() raise — confirm the
            # source guarantees numeric values.
            resp_avg_list = item.pop("resp_avg_list", [])
            if resp_avg_list:
                item["resp_avg"] = int(sum(resp_avg_list) / len(resp_avg_list))
            availability_ratio_list = item.pop("availability_ratio_list", [])
            if availability_ratio_list:
                item["availability_ratio"] = format_ratio(
                    x=sum(availability_ratio_list), y=len(availability_ratio_list), is_100=True)
            date_ratio = item.pop("date_ratio", {})
            if date_ratio:
                # Parallel date/ratio lists, e.g. for charting.
                _date_ratio = {"date": [], "ratio": []}
                for _date_str, _availability_ratio_list in date_ratio.items():
                    _date_ratio["date"].append(_date_str)
                    _date_ratio["ratio"].append(
                        format_ratio(
                            x=sum(_availability_ratio_list), y=len(_availability_ratio_list), is_100=True))
                item["date_ratio"] = _date_ratio
            # Normalize the display casing of the task type.
            if item["task_type"].lower() == "http":
                item["task_type"] = item["task_type"].upper()
                http_task_list.append(item)
            else:
                item["task_type"] = item["task_type"].capitalize()
                ping_task_list.append(item)
        # Worst availability first.
        http_task_list = list(sorted(http_task_list, key=lambda x: x["availability_ratio"]))
        ping_task_list = list(sorted(ping_task_list, key=lambda x: x["availability_ratio"]))
        return http_task_list, ping_task_list

    @staticmethod
    def export_vul_data(warning_maps, **kwargs):
        """Aggregate web vulnerability warnings for export.

        :param warning_maps: {warning_id: warning dict}, already de-duplicated.
        :returns: dict with per-level and per-category counts, per-title
            counts (with ratio strings) and the formatted detail list.
        """
        # len(dict) rather than len(dict.values()) — same value, no view built.
        count = len(warning_maps)
        # Count per level (levels below 2 are clamped upstream).
        level_count_map = {d["en"]: 0 for lvl, d in LEVEL_NUM_STR.items() if lvl >= 2}
        # Count per category
        category_count_map = {}
        # Count per title
        title_count_map = {}
        # Details keyed by title
        detail_map = {}
        for warning in warning_maps.values():
            level = warning["level"]
            level_count_map[LEVEL_NUM_STR[level]["en"]] += 1

            category = warning.get("category")
            category_count_map.setdefault(category, 0)
            category_count_map[category] += 1
            OneJobData.ssl_vul_title_count("vul", title_count_map, warning)
            OneJobData.ssl_vul_warning_detail("vul", detail_map, warning)

        for item in title_count_map.values():
            item["ratio"] = f"{format_ratio(x=item['count'], y=count, is_100=True)}%"
        title_count_list = list(sorted(title_count_map.values(), key=lambda x: (x["level"], x["count"]), reverse=True))

        result = {
            "category_count_map": category_count_map,
            "level_count_map": level_count_map,
            "detail_list": OneJobData.format_ssl_vul_warning_detail(detail_map),
            "count": count,
            "title_count_list": title_count_list
        }
        return result

    @staticmethod
    def export_ssl_data(warning_maps, **kwargs):
        """Aggregate SSL warning statistics.

        Counts warnings per category, per (category, severity level) and per
        title, and groups formatted details per category. "certificate"
        warnings get their parsed certificate info attached in place.

        :param warning_maps: dict of warning_id -> warning dict (mutated:
            cert_info may be added).
        :param kwargs: expects ``warning_cert_info_map`` mapping warning_id
            to certificate info.
        :return: dict with category_count_map, level_count_map,
            category_detail_map, count and title_count_list.
        """
        count = len(warning_maps.values())
        warning_cert_info_map = kwargs.get("warning_cert_info_map")
        # severity counts nested per SSL category (levels >= 2 only)
        level_count_map = {c: {d["en"]: 0 for _, d in LEVEL_NUM_STR.items() if _ >= 2} for c in
                           WARNING_CATEGORY_KEY_LABEL_MAP["ssl"].keys()}
        # count per category
        category_count_map = {}
        # count per warning title
        title_count_map = {}
        # raw per-warning detail accumulator
        detail_map = {}
        # formatted details grouped per category (pre-seeded with all categories)
        category_detail_map = {c: [] for c in WARNING_CATEGORY_KEY_LABEL_MAP["ssl"].keys()}
        for _, warning in warning_maps.items():

            category = warning.get("category")
            category_count_map.setdefault(category, 0)
            category_count_map[category] += 1

            level = warning["level"]
            level_count_map[category][LEVEL_NUM_STR[level]["en"]] += 1
            # certificate warnings carry their parsed cert info for display;
            # `_` is the warning id used to look it up
            if category == "certificate":
                warning.update({"cert_info": warning_cert_info_map.get(_)})

            OneJobData.ssl_vul_title_count("ssl", title_count_map, warning)
            OneJobData.ssl_vul_warning_detail("ssl", detail_map, warning)
        for _, item in title_count_map.items():
            item["ratio"] = f"{format_ratio(x=item['count'], y=count, is_100=True)}%"
        title_count_list = list(sorted(title_count_map.values(), key=lambda x: (x["level"], x["count"]), reverse=True))
        detail_list = OneJobData.format_ssl_vul_warning_detail(detail_map)
        for detail in detail_list:
            category_detail_map[detail["category"]].append(detail)
        result = {
            "category_count_map": category_count_map,
            "level_count_map": level_count_map,
            "category_detail_map": category_detail_map,
            "count": count,
            "title_count_list": title_count_list
        }
        return result

    @staticmethod
    def export_content_data(warning_maps, **kwargs):
        """Aggregate content-violation warning statistics.

        :param warning_maps: dict of warning_id -> warning dict.
        :return: dict with per-category counts, formatted details, total
            count and sub-category counts sorted by (level, count) desc.
        """
        count = len(warning_maps.values())
        # count per content category (pre-seeded so absent categories show 0)
        category_count_map = {t: 0 for t in WARNING_CATEGORY_KEY_LABEL_MAP["content"].keys()}
        # counts per (category, sub-type) pair
        sub_category_map = {}
        # per-category raw detail accumulator
        detail_map = {t: {} for t in WARNING_CATEGORY_KEY_LABEL_MAP["content"].keys()}
        for _, warning in warning_maps.items():
            title = warning.get("title")
            category = warning["category"]
            category_count_map[category] += 1
            level = warning["level"]
            # detail keys are sub-type ids serialized as numeric strings
            # (e.g. "1.0"), hence int(float(t))
            for t, _ in warning.get("detail", {}).items():
                t = int(float(t))
                default_v = {
                    "title": title,
                    "kind": CONTENT_SUB_CATEGORY_MAPPING[category][t],
                    "level_cn": LEVEL_NUM_STR[level]["cn"],
                    "level": level,
                    "count": 0,
                }
                sub_category_map.setdefault(f"{category}_{t}", default_v)
                sub_category_map[f"{category}_{t}"]["count"] += 1
            OneJobData.get_content_sec_detail(detail_map, warning, "content")
        detail_map = OneJobData.format_content_sec_warning_detail(detail_map)
        for _, sub_category in sub_category_map.items():
            sub_category.update({"ratio": f"{format_ratio(x=sub_category['count'], y=count, is_100=True)}%"})
        result = {
            "category_count_map": category_count_map,
            "detail_map": detail_map,
            "count": count,
            "sub_category_count": list(
                sorted(sub_category_map.values(),  key=lambda x: (x["level"], x["count"]), reverse=True))
        }
        return result

    @staticmethod
    def export_security_event_data(warning_maps, **kwargs):
        """Aggregate security-event warning statistics.

        foreign_links warnings are bucketed per severity level and collected
        in their own detail map; all other categories go into detail_map.

        :param warning_maps: dict of warning_id -> warning dict.
        :return: dict with category counts, formatted details, total count,
            the sorted foreign-links detail list and the (title, level)
            buckets sorted by (level, count) desc.
        """
        count = len(warning_maps.values())
        # count per event category (pre-seeded so absent categories show 0)
        category_count_map = {t: 0 for t in WARNING_CATEGORY_KEY_LABEL_MAP["securityEvent"].keys()}
        # counts per (title, level) bucket
        security_level_map = {}
        # per-category raw detail accumulator (foreign_links handled separately)
        detail_map = {t: {} for t in WARNING_CATEGORY_KEY_LABEL_MAP["securityEvent"].keys() if "foreign_links" not in t}
        # foreign_links details are split per severity level (>= 2)
        foreign_links_detail_map = {f"foreign_links_{_}": {} for _ in LEVEL_NUM_STR.keys() if _ >= 2}

        for _, warning in warning_maps.items():
            title = warning.get("title")
            category = warning["category"]
            category_count_map[category] += 1
            level = warning["level"]
            default_v = {
                "title": title,
                "level_cn": LEVEL_NUM_STR[level]["cn"],
                "level": level,
                "count": 0,
                "ratio": "0.0%"
            }
            # foreign_links buckets are keyed by "<title>_<level>" while
            # default_v keeps the raw title for display
            if category == "foreign_links":
                title = f"{title}_{level}"
                OneJobData.get_content_sec_detail(foreign_links_detail_map, warning, "securityEvent")
            else:
                OneJobData.get_content_sec_detail(detail_map, warning, "securityEvent")
            security_level_map.setdefault(title, default_v)
            security_level_map[title]["count"] += 1
            security_level_map[title]["ratio"] = f"{format_ratio(x=security_level_map[title]['count'], y=count, is_100=True)}%"
        detail_map = OneJobData.format_content_sec_warning_detail(detail_map)
        foreign_links_detail_map = OneJobData.format_content_sec_warning_detail(foreign_links_detail_map)
        foreign_links_detail_list = list(sorted(
            [v for v in foreign_links_detail_map.values() if v], key=lambda x: x["level"], reverse=True))
        result = {
            "category_count_map": category_count_map,
            "detail_map": detail_map,
            "count": count,
            "foreign_links_detail_list": foreign_links_detail_list,
            "security_level_map": list(
                sorted(security_level_map.values(),  key=lambda x: (x["level"], x["count"]), reverse=True))
        }
        return result

    @staticmethod
    def export_change_check_data(warning_maps, **kwargs):
        """Aggregate change-check (tamper detection) warning statistics.

        :param warning_maps: dict of warning_id -> warning dict.
        :return: dict with per-category counts, per-level counts, formatted
            details sorted by level desc, and the total count.
        """
        count = len(warning_maps.values())
        # count per change category (pre-seeded so absent categories show 0)
        category_count_map = {t: 0 for t in WARNING_CATEGORY_KEY_LABEL_MAP["changeCheck"].keys()}
        # count per severity level (levels >= 2 only)
        level_count_map = {d["en"]: 0 for _, d in LEVEL_NUM_STR.items() if _ >= 2}
        detail_list = []
        for _, warning in warning_maps.items():
            for k in warning["detail"].keys():
                # Fix: tolerate detail categories outside the pre-seeded
                # label map (previously raised KeyError); mirrors the
                # setdefault used in MutilJobData.export_change_check_warning.
                category_count_map.setdefault(k, 0)
                category_count_map[k] += 1
            level = warning["level"]
            level_count_map[LEVEL_NUM_STR[level]["en"]] += 1
            detail_list.append(OneJobData.format_change_check_detail(warning))
        detail_list = list(sorted(detail_list, key=lambda x: x["level"], reverse=True))
        result = {
            "category_count_map": category_count_map,
            "level_count_map": level_count_map,
            "detail_list": detail_list,
            "count": count,
        }
        return result

    def export_ws_task_data(self):
        """Assemble the web-security section of this job's report.

        Loads the raw task data, then dispatches every web-sec task type to
        its matching ``export_<line>_data`` static method (vul/ssl/content/
        security_event/change_check) via getattr.

        :return: dict keyed by report-section names.
        """
        self._get_ws_task_source_data()
        data = {
            "is_monitor": self.ws_task_data.get("is_monitor"),
            "task_type_count": self.ws_task_data.get("task_type_count", {}),
            "url_count": self.ws_task_data.get("url_count", 0),
            "scan_count": self.ws_task_data.get("scan_count", {"total": 0}),
            "date_warnings": self.ws_task_data.get("date_warnings", {}),
            "task_type_error_msg": self.ws_task_data.get("task_type_error_msg", {t: "" for t in WEB_SEC_TASK_TYPE}),
            "last_time": self.ws_task_data.get("last_time", "")
        }
        task_type_warnings = self.ws_task_data.get("task_type_warnings", {})
        warning_cert_info_map = self.ws_task_data.get("warning_cert_info_map", {})
        for task_type in WEB_SEC_TASK_TYPE:
            warning_maps = task_type_warnings.get(task_type, {})
            # "line" is the section key; an export_<line>_data method must exist
            task_type_line = TASK_TYPE_CONFIG[task_type]["line"]
            func = getattr(self, f"export_{task_type_line}_data")
            data.update({task_type_line: func(warning_maps, warning_cert_info_map=warning_cert_info_map)})
        return data

    def export_available_task_data(self):
        """Assemble the availability (HTTP/Ping dial-test) report section.

        :return: dict with monitoring flag, task counts, error counts/ratio,
            per-target error counts and the per-protocol task detail lists.
        """
        self._get_available_task_source_data()
        data = {
            "is_monitor": self.available_task_data.get("is_monitor"),
            "task_type_count": self.available_task_data.get("task_type_count", {}),
            "has_error_count": self.available_task_data.get("has_error_count", {}).get("total", 0),
            "task_type_error_count": self.available_task_data.get("has_error_count", {}),
            "has_error_ratio": "0.0%",
            "has_error_target_count": {"total": 0, **{t: 0 for t in AVAILABLE_TASK_TYPE}}
        }
        # overall error ratio only when at least one task ran
        if total := data["task_type_count"].get("total"):
            error_count = data["has_error_count"]
            data.update({"has_error_ratio": f"{format_ratio(x=error_count, y=total, is_100=True)}%"})
        # `_` is the task type; count distinct failing targets per type
        for _, targets in self.available_task_data.get("has_error_targets", {}).items():
            data["has_error_target_count"][_] = len(targets)
            data["has_error_target_count"]["total"] += len(targets)
        task_id_map = self.available_task_data.get("task_id_map", {})
        http_task_list, ping_task_list = self.get_available_task_detail(task_id_map)
        data.update({"http_task_list": http_task_list, "ping_task_list": ping_task_list})
        return data

    def export_asset_task_data(self):
        """Assemble the asset-change report section.

        :return: dict with monitoring flag, task counts, change count/ratio,
            per-(change_type, op) counters with display labels, and
            time-sorted change details grouped per change type.
        """
        self._get_asset_task_source_data()
        data = {
            "is_monitor": self.asset_task_data.get("is_monitor"),
            "task_type_count": self.asset_task_data.get("task_type_count", {}),
            "has_changed_count": self.asset_task_data.get("has_changed_count", 0),
            "has_changed_ratio": "0.0%"
        }
        if total := self.asset_task_data.get("task_type_count", {}).get("total"):
            data.update({"has_changed_ratio": f"{format_ratio(x=data['has_changed_count'], y=total, is_100=True)}%"})
        change_type_map = self.asset_task_data.get("change_type_map", {})
        change_detail = {}
        change_op_count = {}
        # pre-seed every (change_type, op) counter with its display label
        for change_type, _d in ASSET_CHANGE_OP.items():
            change_detail.update({change_type: []})
            change_type_cn = WARNING_CATEGORY_KEY_LABEL_MAP["asset"][change_type]
            for op, op_cn in _d.items():
                change_op_count.update({f"{change_type}_{op}": {"cn": f"{change_type_cn}{op_cn}", "count": 0}})
        for change_type, op_maps in change_type_map.items():
            for op, changes in op_maps.items():
                for change in changes:
                    change_detail[change_type].append({
                        "affect": change["affect"],
                        "op_cn": ASSET_CHANGE_OP[change_type][op],
                        "time": change["time"],
                        "local_time": utc_time_to_local(change["time"], is_format=True),
                    })
                change_op_count[f"{change_type}_{op}"]["count"] += len(changes)
        # chronological order within each change type
        for change_type, change_list in change_detail.items():
            change_detail[change_type] = list(sorted(change_list, key=lambda x: x["time"]))
        data.update({
            "change_op_count": change_op_count,
            "change_detail": change_detail
        })
        return data


class MutilJobData(object):
    """多个资产数据"""

    def __init__(self, job_ids, report_type, start_time=None, end_time=None):
        self.job_ids = job_ids
        if not self.job_ids:
            raise ValueError("没有选择导出报告资产")
        self.report_type = report_type
        self.start_time = start_time
        self.end_time = end_time

    @staticmethod
    def _get_bug_detail(task_type, job, warning):
        """Build one spreadsheet row for a web/SSL vulnerability warning."""
        traffics = warning.get("traffics", [])
        if traffics:
            first = traffics[0]
            request, response = request_response_info(
                first.get("request", {}), first.get("response", {}))
        else:
            request = response = ""
        # fall back to the raw category key when no label is mapped
        category_cn = (WARNING_CATEGORY_KEY_LABEL_MAP[task_type].get(warning["category"])
                       or warning["category"])
        found_at = warning.get("foundAt")
        return [
            warning.get("title"),
            LEVEL_NUM_STR[warning["level"]]["cn"],
            category_cn,
            warning.get("affects"),
            job.targetUrl,
            job.sourceIp or "--",
            job.note,
            utc_time_to_local(found_at, is_format=True),
            request or "暂无数据",
            response or "暂无数据",
            warning.get("payload", ""),
            warning.get("detailText", ""),
            warning.get("description", ""),
            warning.get("recommendation", ""),
            warning["level"],
            found_at
        ]

    @staticmethod
    def _format_security_event_detail_str(category, detail):
        if category == "black_links":
            return f'暗链地址：{"、".join(detail.get("links", []))}'
        elif category == "broken_links":
            return f"坏链地址: {'、'.join(detail)}"
        elif category == "cryjack":
            return "\n".join(
                [f"关键词：{result.get('pattern', '')}；链接资源：{result.get('src', '')}" for result in detail])
        elif category == "malscan":
            return "\n".join([f"特征描述：{d['description']}; 类型：{d['category']}; 辅助信息：{d['url']}" for d in detail])
        elif category == "foreign_links":
            tmp = []
            for _, v in detail.items():
                for t, _d in v.items():
                    if t == "icp":
                        for d in _d:
                            tmp.append(
                                f"未备案：{d.get('risk_url', '')}；域名{d.get('risk_domain', '')}未备案，可能为不合规网站，请及时处理。")
                    elif t == "keyword":
                        for d in _d:
                            tmp.append(f"违规内容：{d.get('risk_url', '')}；{d.get('extra_details', '')}")
                    elif t == "domain":
                        for d in _d:
                            tmp.append(f"风险域名：{d.get('risk_domain', '')}；{d.get('extra_details', '')}")
            return "\n".join(tmp)
        return ""

    @staticmethod
    def _get_security_event_detail(job, warning):
        """Build one spreadsheet row for a security-event warning."""
        level = warning["level"]
        found_at = warning.get("foundAt")
        detail_str = MutilJobData._format_security_event_detail_str(
            warning["category"], warning.get("detail"))
        return [
            warning.get("title"),
            LEVEL_NUM_STR[level]["cn"],
            warning.get("affects"),
            job.targetUrl,
            job.sourceIp or "--",
            job.note,
            utc_time_to_local(found_at, is_format=True),
            detail_str,
            level,
            found_at
        ]

    @staticmethod
    def _get_content_detail(job, warning):
        """Build one spreadsheet row for a content-violation warning."""
        category = warning["category"]
        # detail keys are sub-type ids serialized as numeric strings ("1.0")
        parts = [
            f"{CONTENT_SUB_CATEGORY_MAPPING[category][int(float(t))]}: {'、'.join(v)}"
            for t, v in warning.get("detail", {}).items()
        ]
        found_at = warning.get("foundAt")
        return [
            warning.get("title"),
            LEVEL_NUM_STR[warning["level"]]["cn"],
            warning.get("affects"),
            job.targetUrl,
            job.sourceIp or "--",
            job.note,
            utc_time_to_local(found_at, is_format=True),
            "\n".join(parts),
            warning["level"],
            found_at
        ]

    def _export_bug_warning(self, job, task_type, warning_map, detail=True):
        """Tally one job's web/SSL warnings by level, category and title.

        :return: (detail_rows, category_count_map, level_count_map,
            title_count_map); detail_rows is empty when ``detail`` is False.
        """
        detail_list = []
        # counts per severity level
        level_count_map = {}
        # counts per vulnerability category
        category_count_map = {}
        # counts per vulnerability title
        title_count_map = {}
        for warning in warning_map.values():
            level = warning["level"]
            level_count_map[level] = level_count_map.get(level, 0) + 1
            category = warning.get("category")
            category_count_map[category] = category_count_map.get(category, 0) + 1
            OneJobData.ssl_vul_title_count(task_type, title_count_map, warning)
            if detail:
                detail_list.append(self._get_bug_detail(task_type, job, warning))
        return detail_list, category_count_map, level_count_map, title_count_map

    def export_vul_warning(self, job, warning_map, detail=True):
        """Export web-vulnerability ("vul") warning statistics for one job."""
        return self._export_bug_warning(job, "vul", warning_map, detail=detail)

    def export_ssl_warning(self, job, warning_map, detail=True):
        """Export SSL warning statistics for one job."""
        return self._export_bug_warning(job, "ssl", warning_map, detail=detail)

    def export_content_warning(self, job, warning_map, detail=True):
        """Tally one job's content-violation warnings.

        :return: (detail_rows, category_count_map, sub_category_map);
            detail_rows is empty when ``detail`` is False.
        """
        detail_list = []
        # counts per content category
        category_count_map = {}
        # counts per (category, sub-type) pair
        sub_category_map = {}
        for warning in warning_map.values():
            title = warning.get("title")
            category = warning["category"]
            category_count_map[category] = category_count_map.get(category, 0) + 1
            level = warning["level"]
            # detail keys are sub-type ids serialized as numeric strings
            for sub_type in warning.get("detail", {}):
                sub_type = int(float(sub_type))
                key = f"{category}_{sub_type}"
                if key not in sub_category_map:
                    sub_category_map[key] = {
                        "title": title,
                        "kind": CONTENT_SUB_CATEGORY_MAPPING[category][sub_type],
                        "level_cn": LEVEL_NUM_STR[level]["cn"],
                        "level": level,
                        "count": 0,
                    }
                sub_category_map[key]["count"] += 1
            if detail:
                detail_list.append(self._get_content_detail(job, warning))
        return detail_list, category_count_map, sub_category_map

    def export_security_event_warning(self, job, warning_map, detail=True):
        """Tally one job's security-event warnings.

        foreign_links warnings are bucketed per (title, level); other
        categories bucket by title alone.

        :return: (detail_rows, category_count_map, security_level_map);
            detail_rows is empty when ``detail`` is False.
        """
        detail_list = []
        # counts per event category
        category_count_map = {}
        # counts per title (or title_level for foreign_links)
        security_level_map = {}
        for warning in warning_map.values():
            title = warning.get("title")
            category = warning["category"]
            category_count_map[category] = category_count_map.get(category, 0) + 1
            level = warning["level"]
            key = f"{title}_{level}" if category == "foreign_links" else title
            if key not in security_level_map:
                # the bucket keeps the raw (unsuffixed) title for display
                security_level_map[key] = {
                    "title": title,
                    "level_cn": LEVEL_NUM_STR[level]["cn"],
                    "level": level,
                    "count": 0,
                    "ratio": "0.0%"
                }
            security_level_map[key]["count"] += 1
            if detail:
                detail_list.append(self._get_security_event_detail(job, warning))
        return detail_list, category_count_map, security_level_map

    @staticmethod
    def _format_change_check_detail_str(detail):
        """Render baseline/current descriptions for one change-check detail.

        :param detail: formatted change dict with change_type, status, url
            and baseline/current {size, status_code, md5} snapshots.
        :return: (baseline_str, current_str) newline-joined display strings.
        """
        def _snapshot_lines(snap):
            # Common "size / status code / md5" rendering for one snapshot.
            return [
                f"文件大小: {snap['size']}KB",
                f"状态码: {snap['status_code']}",
                f"MD5值: {snap['md5']}",
            ]

        change_type = detail["change_type"]
        status = detail["status"]
        baseline, current = [], []
        if change_type != ChangeCheckType.resource.value:
            baseline.extend(_snapshot_lines(detail["baseline"]))
            current.extend(_snapshot_lines(detail["current"]))
        elif status in ("新增", "删除"):
            # Fix: the "新增" and "删除" branches were byte-for-byte
            # duplicates — merged into one (behavior unchanged).
            # NOTE(review): for "删除" this still renders the *current*
            # snapshot; the baseline snapshot is presumably intended for a
            # deleted resource — confirm with the data producer before
            # changing behavior.
            current.append(f"URL: {detail['url']}")
            current.extend(_snapshot_lines(detail["current"]))
        else:
            baseline.append(f"URL: {detail['url']}")
            baseline.extend(_snapshot_lines(detail["baseline"]))
            current.append(f"URL: {detail['url']}")
            current.extend(_snapshot_lines(detail["current"]))
        return "\n".join(baseline), "\n".join(current)

    @staticmethod
    def _get_change_check_detail(warning, job):
        """Build spreadsheet rows (one per change) for a change-check warning."""
        details = []
        for tpy, detail in warning["detail"].items():
            if tpy == ChangeCheckType.resource.value:
                # resource changes carry a list of individual file changes;
                # ratio is fixed to 1 (whole file changed)
                for change in detail["changes"]:
                    _ = OneJobData.format_change_check_change(change=change, change_type=tpy)
                    _["change_ratio"] = 1
                    details.append(_)
            else:
                # other change types carry one change plus a change ratio
                _ = OneJobData.format_change_check_change(change=detail["change"], change_type=tpy)
                _["change_ratio"] = f'{format_ratio(value=detail["change_ratio"], is_100=True)}%'
                details.append(_)
        res = []
        for d in details:
            baseline_str, current_str = MutilJobData._format_change_check_detail_str(d)
            res.append([
                WARNING_CATEGORY_KEY_LABEL_MAP[TaskType.change_check.value].get(d["change_type"]) or d["change_type"],
                LEVEL_NUM_STR[warning["level"]]["cn"],
                warning.get("affects"),
                job.targetUrl,
                job.sourceIp or "--",
                job.note,
                utc_time_to_local(warning.get("foundAt"), is_format=True),
                d["status"],
                d["change_ratio"],
                baseline_str,
                current_str,
                warning['level'],
                warning.get("foundAt")
            ])
        return res

    def export_change_check_warning(self, job, warning_map, detail=True):
        """Tally one job's change-check warnings.

        :return: (detail_rows, category_count_map, security_level_map);
            category_count_map also carries a "total" key and level buckets
            are keyed "<category>_<level>". detail_rows is empty when
            ``detail`` is False.
        """
        detail_list = []
        # count per change category (plus "total")
        category_count_map = {}
        # counts per (category, level) bucket
        security_level_map = {}

        for _, warning in warning_map.items():
            level = warning["level"]
            category_count_map.setdefault("total", 0)
            category_count_map["total"] += 1
            for k in warning["detail"].keys():
                category_count_map.setdefault(k, 0)
                category_count_map[k] += 1
                default_v = {
                    "title": WARNING_CATEGORY_KEY_LABEL_MAP[TaskType.change_check.value][k],
                    "level_cn": LEVEL_NUM_STR[level]["cn"],
                    "level": level,
                    "count": 0,
                    "ratio": "0.0%"
                }
                title = f"{k}_{level}"
                security_level_map.setdefault(title, default_v)
                security_level_map[title]["count"] += 1
            if detail:
                detail_list.extend(self._get_change_check_detail(warning, job))

        return detail_list, category_count_map, security_level_map

    def marge_bug_data(self, job, task_type, warning_maps, ws_task_detail, bug_category_count_map,
                       bug_level_count_map, this_job_bug_level_map, bug_title_count_map, bug_details):
        """Merge one job's vul/ssl warning statistics into shared accumulators.

        All ``bug_*`` / ``this_job_*`` parameters are mutated in place;
        ``ws_task_detail`` toggles whether per-warning detail rows are built.
        """
        detail_list, category_count_map, level_count_map, title_count_map = self._export_bug_warning(
            job, task_type, warning_maps, detail=ws_task_detail)
        self.marge_dict_data(bug_category_count_map, category_count_map)
        self.marge_dict_data(bug_level_count_map, level_count_map)
        self.marge_dict_data(this_job_bug_level_map, level_count_map)
        bug_details.extend(detail_list)
        self.marge_dict_add_job_count(bug_title_count_map, title_count_map)

    @staticmethod
    def marge_dict_data(result_dict, this_dict):
        for t, v in this_dict.items():
            if not isinstance(v, (int, float)):
                continue
            if t in result_dict:
                result_dict[t] += v
            else:
                result_dict[t] = v
        return result_dict

    @staticmethod
    def marge_dict_add_job_count(result_dict, this_dict):
        for t, d in this_dict.items():
            if not isinstance(d, dict):
                continue
            if t in result_dict:
                result_dict[t]["job_count"] += 1
                result_dict[t]["count"] += d["count"]
            else:
                result_dict.update({t: {"job_count": 1, **d}})
        return result_dict

    @staticmethod
    def _get_bug_msg(level_map):
        res = []
        if critical := level_map.get(5):
            res.append(f"{critical}个严重")
        if high := level_map.get(4):
            res.append(f"{high}个高危")
        if medium := level_map.get(3):
            res.append(f"{medium}个中危")
        if low := level_map.get(2):
            res.append(f"{low}个低危")
        return "、".join(res)

    @staticmethod
    def _get_content_msg(content_category):
        """Summarise content-violation counts as "N个<label>" joined by 、."""
        labels = WARNING_CATEGORY_KEY_LABEL_MAP['content']
        parts = []
        for category, num in content_category.items():
            label = labels.get(category)
            if label and num:
                parts.append(f"{num}个{label}")
        return "、".join(parts)

    @staticmethod
    def _get_security_msg(security_category):
        """Summarise security-event counts as "N个<label>" joined by 、."""
        labels = WARNING_CATEGORY_KEY_LABEL_MAP['securityEvent']
        parts = []
        for category, num in security_category.items():
            label = labels.get(category)
            if label and num:
                parts.append(f"{num}个{label}")
        return "、".join(parts)

    @staticmethod
    def _get_change_check_msg(category_map):
        """Summarise change-check counts as "N个<label>" joined by 、."""
        labels = WARNING_CATEGORY_KEY_LABEL_MAP['changeCheck']
        parts = []
        for category, num in category_map.items():
            label = labels.get(category)
            if label and num:
                parts.append(f"{num}个{label}")
        return "、".join(parts)

    @staticmethod
    def _marge_one_job_time(one_job_obj, start_time, end_time):
        _start_time, _end_time = one_job_obj.get_export_time()
        if (not start_time) or (start_time and _start_time < start_time):
            start_time = _start_time
        if (not end_time) or (end_time and _end_time > end_time):
            end_time = _end_time
        return start_time, end_time

    def _marge_one_job_ws_data(self, job_detail, one_job_obj, ws_task_type_count,
                               vul_category_count_map, vul_level_count_map, vul_title_count_map, vul_details,
                               ssl_category_count_map, ssl_level_count_map, ssl_title_count_map, ssl_details,
                               content_details, content_category_count_map, content_sub_category_map,
                               security_event_details, security_category_count_map, security_level_map,
                               change_check_details, change_check_category_count_map, change_check_level_map,
                               ws_task_detail):
        """Merge one job's web-security data into the multi-job accumulators.

        Fills ``job_detail`` (the per-job summary row) and folds the job's
        vul/ssl/content/securityEvent/changeCheck statistics into the shared
        count maps and detail lists; every accumulator parameter is mutated
        in place. ``ws_task_detail`` toggles per-warning detail rows.
        """
        job = one_job_obj.job
        ws_task_data = one_job_obj.ws_task_data
        job_detail["is_monitor"] = True
        job_detail["task_have_error"] = ws_task_data.get("task_has_error", False)
        job_detail["url_count"] = ws_task_data.get("url_count", 0)
        job_detail["scan_count"] = ws_task_data.get("scan_count", {}).get("total", 0)
        job_level, msg = one_job_obj.get_job_level()
        job_detail["level"] = LEVEL_CN_NUM.get(job_level, 0)
        # monitored but security state unknown -> sentinel level -1
        if job_level == "未知":
            job_detail["level"] = -1
        self.marge_dict_data(ws_task_type_count, ws_task_data.get("task_type_count", {}))
        task_type_warnings = ws_task_data.get("task_type_warnings", {})
        this_job_bug_level_map = {}
        # web vulnerabilities
        vul_warning_maps = task_type_warnings.get("vul", {})
        self.marge_bug_data(job, "vul", vul_warning_maps, ws_task_detail, vul_category_count_map,
                            vul_level_count_map, this_job_bug_level_map, vul_title_count_map, vul_details)

        # SSL issues (share the per-job bug level map with web vulns)
        ssl_warning_maps = task_type_warnings.get("ssl", {})
        self.marge_bug_data(job, "ssl", ssl_warning_maps, ws_task_detail, ssl_category_count_map,
                            ssl_level_count_map, this_job_bug_level_map, ssl_title_count_map, ssl_details)
        job_detail.update({"bug": {
            "total": sum(this_job_bug_level_map.values()), "msg": self._get_bug_msg(this_job_bug_level_map)}})

        # content violations
        detail_list, category_count_map, sub_category_map = self.export_content_warning(
            job, task_type_warnings.get("content", {}), detail=ws_task_detail)
        content_details.extend(detail_list)
        self.marge_dict_data(content_category_count_map, category_count_map)
        self.marge_dict_add_job_count(content_sub_category_map, sub_category_map)
        job_detail.update({"content": {
            "total": sum(category_count_map.values()), "msg": self._get_content_msg(category_count_map)}})

        # security events
        detail_list, category_count_map, _security_level_map = self.export_security_event_warning(
            job, task_type_warnings.get("securityEvent", {}), detail=ws_task_detail)
        security_event_details.extend(detail_list)
        self.marge_dict_data(security_category_count_map, category_count_map)
        self.marge_dict_add_job_count(security_level_map, _security_level_map)
        job_detail.update({"security": {
            "total": sum(category_count_map.values()), "msg": self._get_security_msg(category_count_map)}})

        # change-check (tamper) warnings
        detail_list, category_count_map, _change_check_level_map = self.export_change_check_warning(
            job, task_type_warnings.get("changeCheck", {}), detail=ws_task_detail)
        change_check_details.extend(detail_list)
        self.marge_dict_data(change_check_category_count_map, category_count_map)
        self.marge_dict_add_job_count(change_check_level_map, _change_check_level_map)
        job_detail.update({"change_check": {
            "total": sum(category_count_map.values()), "msg": self._get_change_check_msg(category_count_map)}})

    def _available_task_detail(self, job, task_type_warnings, detail_list):
        """Append one spreadsheet row per availability warning to detail_list."""
        for task_type, warning_maps in task_type_warnings.items():
            # display name is loop-invariant per task type, so compute once
            display_type = task_type.upper() if task_type.lower() == "http" else task_type.capitalize()
            for item in warning_maps.values():
                detail_list.append([
                    display_type,
                    item["target"],
                    job.targetUrl,
                    job.sourceIp or "--",
                    job.note,
                    utc_time_to_local(item["fundAt"], is_format=True),
                    item["detail"],
                    item["fundAt"]
                ])

    def _marget_one_job_available_task_data(self, one_job_obj, task_type_count, http_task_list,
                                            ping_task_list, detail_list, export_detail):
        """Fold one job's availability-task data into the shared accumulators.

        Merges the per-type execution counters, extends the http/ping task
        lists, and (when ``export_detail`` is set) collects warning rows.
        """
        available_task_data = one_job_obj.available_task_data
        self.marge_dict_data(task_type_count, available_task_data.get("task_type_count"))

        http_rows, ping_rows = one_job_obj.get_available_task_detail(
            available_task_data.get("task_id_map", {}))
        http_task_list.extend(http_rows)
        ping_task_list.extend(ping_rows)

        if export_detail:
            task_type_warnings = available_task_data.get("task_type_warnings", {})
            if task_type_warnings:
                self._available_task_detail(one_job_obj.job, task_type_warnings, detail_list)

    def _marge_one_job_asset_task_data(self, one_job_obj, most_change_asset, task_type_count, change_type_map, detail_list, export_detail):
        """Fold one job's asset-change results into the shared accumulators.

        Counts changes per type (globally and for this job), optionally
        collects per-change detail rows, and keeps ``most_change_asset``
        pointing at the job with the highest change count seen so far.
        """
        job = one_job_obj.job
        asset_task_data = one_job_obj.asset_task_data
        self.marge_dict_data(task_type_count, asset_task_data.get("task_type_count"))

        changed_total = asset_task_data.get("has_changed_count")
        if not changed_total:
            return

        per_type_count = {"total": 0, "port": 0, "subdomain": 0}
        for change in asset_task_data.get("changed_list"):
            change_type = change["change_type"]
            change_type_map[change_type] = change_type_map.get(change_type, 0) + 1
            per_type_count[change_type] = per_type_count.get(change_type, 0) + 1
            per_type_count["total"] += 1
            if export_detail:
                detail_list.append([
                    ASSET_CHANGE_TYPE_CN[change_type],
                    job.targetUrl,
                    job.sourceIp or "--",
                    job.note,
                    utc_time_to_local(change["fundAt"], is_format=True),
                    change["detail"],
                    change["fundAt"],
                ])

        # Remember only the single job with the most changes overall.
        if most_change_asset.get("total", 0) < changed_total:
            most_change_asset.update({
                "note": job.note,
                "target_url": job.targetUrl,
                **per_type_count,
            })

    def export_ws_task_data(self, export_detail=True):
        """Aggregate web-security monitoring results across all report jobs.

        Iterates every job id, merges each job's warning data for the five
        web-security categories (vul, ssl, content, securityEvent,
        changeCheck) into shared accumulators, and returns one summary dict
        used for report rendering.

        :param export_detail: when True, per-warning detail rows are
            collected into the ``*_details`` lists in addition to counters.
        :return: dict with time range, counters, per-job summaries and
            sorted per-category detail rows.
        """
        # Web-security task execution counts, keyed by task type.
        task_type_count = {"total": 0, **{t: 0 for t in WEB_SEC_TASK_TYPE}}
        # Per-category warning detail rows.
        ssl_details, vul_details, content_details, security_event_details, change_check_details = [], [], [], [], []
        # Warning counts grouped by warning category, one map per task type.
        ssl_category_count_map, vul_category_count_map, content_category_count_map, security_category_count_map, change_check_category_count_map = {}, {}, {}, {}, {}
        # ssl / web-vul warning counts grouped by severity level.
        ssl_level_count_map, vul_level_count_map = {}, {}
        # ssl / web-vul warning counts grouped by warning title.
        ssl_title_count_map, vul_title_count_map = {}, {}
        # Content-violation counts grouped by warning sub-category.
        content_sub_category_map = {}
        # Security-event counts grouped by event type and severity level.
        security_level_map = {}
        # Tamper (change-check) counts grouped by change type and severity level.
        change_check_level_map = {}
        # Number of URLs monitored by web-security tasks.
        scan_urls = 0
        # Number of monitoring runs.
        scan_count = 0
        # Per-job web-security monitoring results.
        job_list = []
        start_time, end_time = "", ""
        last_time = ""
        job_warning_level_map = {}
        all_job_count = 0
        for job_id in set(self.job_ids):
            try:
                one_job_obj = OneJobData(
                    job_id=job_id, report_type=self.report_type,
                    start_time=self.start_time, end_time=self.end_time
                )
            except JobDataError:
                # Invalid or deleted jobs are skipped silently.
                continue
            all_job_count += 1
            one_job_obj._get_ws_task_source_data()
            job = one_job_obj.job
            job_detail = {
                "target_url": job.targetUrl,
                "target_note": job.note,
                "is_monitor": False,
                "task_have_error": False,
                "source_ip": job.sourceIp or "--",
                "url_count": 0,
                "scan_count": 0,
                "level": -2,  # -2: not monitored, security status unknown
                "bug": {"total": 0, "msg": ""},
                "security": {"total": 0, "msg": ""},
                "content": {"total": 0, "msg": ""},
                "change_check": {"total": 0, "msg": ""},
            }
            ws_task_data = one_job_obj.ws_task_data
            if ws_task_data.get("is_monitor"):
                start_time, end_time = self._marge_one_job_time(one_job_obj, start_time, end_time)
                # Track the most recent monitoring time across all jobs.
                if (not last_time) or (last_time < ws_task_data.get("last_time")):
                    last_time = ws_task_data.get("last_time")
                self._marge_one_job_ws_data(
                    job_detail, one_job_obj, task_type_count,
                    vul_category_count_map, vul_level_count_map, vul_title_count_map, vul_details,
                    ssl_category_count_map, ssl_level_count_map, ssl_title_count_map, ssl_details,
                    content_details, content_category_count_map, content_sub_category_map,
                    security_event_details, security_category_count_map, security_level_map,
                    change_check_details, change_check_category_count_map, change_check_level_map,
                    export_detail)
                scan_urls += job_detail["url_count"]
                scan_count += job_detail["scan_count"]
            if job_detail["level"] >= 2:
                # Only jobs at warning level >= 2 count in the level histogram.
                job_warning_level_map.setdefault(job_detail["level"], 0)
                job_warning_level_map[job_detail["level"]] += 1
            job_list.append(job_detail)

        # (Chinese level label, count) pairs, highest level first.
        job_warning_level_list = [(LEVEL_NUM_STR[_[0]]["cn"], _[1]) for _ in sorted(
            job_warning_level_map.items(), key=lambda x:x[0], reverse=True)]
        task_warning_type_count = {
            "vul": sum(vul_category_count_map.values()),
            "ssl": sum(ssl_category_count_map.values()),
            "content": sum(content_category_count_map.values()),
            "securityEvent": sum(security_category_count_map.values()),
            # pop() deliberately strips the synthetic "total" key so it does
            # not leak into change_check_category_count_map further below.
            "changeCheck": change_check_category_count_map.pop("total", 0)
        }
        task_warning_type_count["total"] = sum(task_warning_type_count.values())

        # "bug" merges web vulnerability and SSL findings into one bucket.
        vul_title_count_map.update(ssl_title_count_map)
        bug_title_count_list = [_ for _ in sorted(
            vul_title_count_map.values(), key=lambda x: (x["level"], x["count"]), reverse=True)]
        bug_category_count_map = {}
        for category, count in vul_category_count_map.items():
            bug_category_count_map.update({WARNING_CATEGORY_KEY_LABEL_MAP["vul"].get(category, category): count})
        for category, count in ssl_category_count_map.items():
            bug_category_count_map.update({WARNING_CATEGORY_KEY_LABEL_MAP["ssl"].get(category, category): count})
        bug_details = vul_details + ssl_details
        # Sort by latest found-at time (column 14 of the detail row).
        bug_details = list(sorted(bug_details, key=lambda x: x[14]))

        content_sub_category_list = [_ for _ in sorted(
            content_sub_category_map.values(), key=lambda x: (x["level"], x["count"]), reverse=True)]
        # NOTE(review): column 8 is presumably the found-at timestamp — confirm.
        content_details = list(sorted(content_details, key=lambda x: x[8]))

        security_level_list = [_ for _ in sorted(
            security_level_map.values(), key=lambda x: (x["level"], x["count"]), reverse=True)]
        security_event_details = list(sorted(security_event_details, key=lambda x: x[8]))

        change_check_level_list = [_ for _ in sorted(
            change_check_level_map.values(), key=lambda x: (x["level"], x["count"]), reverse=True)]
        # Sort by latest found-at time (column 12 of the detail row).
        change_check_details = list(sorted(change_check_details, key=lambda x: x[12]))

        task_type_count["bug"] = task_type_count["vul"] + task_type_count["ssl"]
        task_warning_type_count["bug"] = task_warning_type_count["vul"] + task_warning_type_count["ssl"]
        ws_task_data = {
            "last_time": last_time,
            "start_time": start_time,
            "end_time": end_time,
            "scan_urls": scan_urls,
            "scan_count": scan_count,
            "task_type_count": task_type_count,
            "job_warning_level_list": job_warning_level_list,
            # Jobs sorted by warning level, most severe first.
            "job_list": list(sorted(job_list, key=lambda x: x["level"], reverse=True)),
            "task_warning_type_count": task_warning_type_count,

            "bug_details": bug_details,
            "bug_title_count_list": bug_title_count_list,
            "bug_category_count_map": bug_category_count_map,

            "content_details": content_details,
            "content_sub_category_list": content_sub_category_list,
            "content_category_count_map": {WARNING_CATEGORY_KEY_LABEL_MAP["content"][t]: v for t, v in
                                           content_category_count_map.items()},

            "security_event_details": security_event_details,
            "security_level_list": security_level_list,
            "security_category_count_map": {WARNING_CATEGORY_KEY_LABEL_MAP["securityEvent"][t]: v for t, v in
                                            security_category_count_map.items()},

            "change_check_details": change_check_details,
            "change_check_level_list": change_check_level_list,
            "change_check_category_count_map": {WARNING_CATEGORY_KEY_LABEL_MAP["changeCheck"][t]: v for t, v in
                                                change_check_category_count_map.items()}
        }
        return ws_task_data

    def export_asset_task_data(self, export_detail=True):
        """Aggregate asset-monitor results across all report jobs.

        :param export_detail: when True, per-change detail rows are
            collected into ``detail_list`` as well as the counters.
        :return: summary dict with counters, the overall time range,
            per-change-type counts and the most-changed asset.
        """
        start_time = end_time = ""
        all_job_count = 0
        is_monitor_job_count = 0
        has_change_job_count = 0
        task_type_count = {"total": 0, "asset": 0}
        change_type_map = {}
        most_change_asset = {}
        detail_list = []

        for job_id in set(self.job_ids):
            try:
                one_job_obj = OneJobData(
                    job_id=job_id, report_type=self.report_type,
                    start_time=self.start_time, end_time=self.end_time
                )
            except JobDataError:
                # Invalid or deleted jobs are skipped silently.
                continue
            all_job_count += 1
            one_job_obj._get_asset_task_source_data()
            data = one_job_obj.asset_task_data
            if not data.get("is_monitor"):
                continue
            is_monitor_job_count += 1
            start_time, end_time = self._marge_one_job_time(one_job_obj, start_time, end_time)
            if data.get("has_changed_count"):
                has_change_job_count += 1
            self._marge_one_job_asset_task_data(
                one_job_obj, most_change_asset, task_type_count, change_type_map, detail_list, export_detail)

        # Oldest finding first (last column is the raw found-at timestamp).
        detail_list.sort(key=lambda row: row[-1])
        return {
            "task_type_count": task_type_count,
            "is_monitor_job_count": is_monitor_job_count,
            "start_time": start_time,
            "end_time": end_time,
            "all_job_count": all_job_count,
            "has_change_job_count": has_change_job_count,
            "detail_list": detail_list,
            "change_type_map": change_type_map,
            "most_change_asset": most_change_asset
        }

    def export_available_task_data(self, export_detail=False):
        """Aggregate availability-monitor (http/ping) results across all jobs.

        :param export_detail: when True, per-warning detail rows are
            collected into ``detail_list``.
        :return: summary dict with counters, the worst-availability target,
            the three worst http/ping tasks and all warning tasks.
        """
        start_time = end_time = ""
        all_job_count = 0
        is_monitor_job_count = 0
        has_error_job_count = 0
        task_type_count = {"total": 0, **{t: 0 for t in AVAILABLE_TASK_TYPE}}
        detail_list = []
        http_task_list = []
        ping_task_list = []

        for job_id in set(self.job_ids):
            try:
                one_job_obj = OneJobData(
                    job_id=job_id, report_type=self.report_type,
                    start_time=self.start_time, end_time=self.end_time
                )
            except JobDataError:
                # Invalid or deleted jobs are skipped silently.
                continue
            all_job_count += 1
            one_job_obj._get_available_task_source_data()
            data = one_job_obj.available_task_data
            if not data.get("is_monitor"):
                continue
            start_time, end_time = self._marge_one_job_time(one_job_obj, start_time, end_time)
            is_monitor_job_count += 1
            if data.get("has_error"):
                has_error_job_count += 1
            self._marget_one_job_available_task_data(
                one_job_obj, task_type_count, http_task_list,
                ping_task_list, detail_list, export_detail)

        # Worst availability ratio first.
        http_task_list.sort(key=lambda t: t["availability_ratio"])
        ping_task_list.sort(key=lambda t: t["availability_ratio"])
        warning_http_task_list = [t for t in http_task_list if t["is_warning"]]
        warning_ping_task_list = [t for t in ping_task_list if t["is_warning"]]

        # Single worst warning target; http takes precedence over ping.
        lowest_ratio_target = ""
        if warning_http_task_list:
            worst = warning_http_task_list[0]
            lowest_ratio_target = f"{worst['name']}({worst['target']})"
        elif warning_ping_task_list:
            worst = warning_ping_task_list[0]
            lowest_ratio_target = f"{worst['name']}({worst['target']})"

        detail_list.sort(key=lambda row: row[-1])
        return {
            "task_type_count": task_type_count,
            "is_monitor_job_count": is_monitor_job_count,
            "start_time": start_time,
            "end_time": end_time,
            "all_job_count": all_job_count,
            "has_error_job_count": has_error_job_count,
            "detail_list": detail_list,
            "lowest_ratio_target": lowest_ratio_target,
            "http_task_list": http_task_list[:3],
            "ping_task_list": ping_task_list[:3],
            "warning_task_list": warning_http_task_list + warning_ping_task_list
        }
