"""
报告导出
"""
import time
import os
import datetime
import arrow
from io import BytesIO
import shutil

import jinja2
import openpyxl
from docxtpl import DocxTemplate, InlineImage
from docx.shared import Mm

from app.reports.data import OneJobData, AVAILABLE_TASK_TYPE, WEB_SEC_TASK_TYPE, MutilJobData
from app.reports.echarts import (
    get_pie_charts_image,
    get_bar_charts_image,
    get_line_charts_image
)
from app.reports.utils import (
    get_report_dir,
    export_zip,
    write_sheet,
    check_report_filename
)
from app.db.models.celery_task import CeleryTask
from app.db.models.tasks import Task
from app.db.models.jobs import Job
from app.db.models.asset_tasks import AssetTasks
from app.errors import ParameterError
from app.config.settings import BASE_DIR, DATETIME_FMT, SPIDER_URL_TIME
from app.libs.display import (
    LEVEL_EN_CN, WARNING_CATEGORY_KEY_LABEL_MAP, LEVEL_NUM_STR, ASSET_CHANGE_TYPE_CN
)
from app.libs.enums import TaskType, TASK_TYPE_CONFIG


def update_report_task_process(celery_obj, process, file_io=None, filename=None):
    """Record report-export progress on a celery task document.

    Below 100 only the pending status and progress fields are written.  At
    100 — and only when both the finished file buffer and its filename are
    supplied — the file bytes are stored on the result and the task is marked
    successful.  A falsy ``celery_obj`` makes the call a no-op.
    """
    if not celery_obj:
        return
    if process < 100:
        celery_obj.update(result__status="pending", result__process=process)
        return
    if not (file_io and filename):
        # Complete, but nothing to persist — leave the record untouched.
        return
    result = celery_obj.result
    result.file.put(file_io.getvalue())
    result.create_at = datetime.datetime.utcnow()
    result.filename = filename
    result.status = "success"
    result.process = 100
    celery_obj.save()


class ReportExport(object):
    """Thin wrapper around a docx template with this module's jinja2 filters."""

    def __init__(self, template):
        self.tpl = DocxTemplate(template)
        self.jinja_env = jinja2.Environment()
        self._register_filter(self.jinja_env)

    def _register_filter(self, jinja_env):
        """Register every ``*_filter`` method of this instance as a jinja2
        filter, keyed by the full method name.  Called once from ``__init__``.
        """
        for name in dir(self):
            if name.endswith("_filter"):
                jinja_env.filters[name] = getattr(self, name)

    @staticmethod
    def no_data_filter(data):
        """Return a "no data" placeholder for falsy values, else the value."""
        return data if data else "暂无数据"

    def save(self, context, is_file=False, path=None):
        """Render the template with ``context`` (autoescaped) and save it.

        Returns ``path`` when ``is_file`` is true, otherwise a rewound
        :class:`BytesIO` holding the rendered document.
        """
        self.tpl.render(context=context, jinja_env=self.jinja_env, autoescape=True)
        if is_file:
            self.tpl.save(filename=path)
            return path
        buffer = BytesIO()
        self.tpl.save(buffer)
        buffer.seek(0)
        return buffer

    def write_inline_image(self, image, width=Mm(132), height=Mm(90)):
        """Wrap ``image`` as an :class:`InlineImage` bound to this template."""
        return InlineImage(self.tpl, image_descriptor=image, width=width, height=height)


class _OneAndMutilCommon:
    """Chart-building helpers shared by the single-job and multi-job report
    exporters.  Subclasses must provide ``self.tpl`` (a ``ReportExport``).
    """

    def get_bar_image(self, data, title=None):
        """Render {key: count} as a bar chart and return it as an inline image.

        Keys are translated through ``LEVEL_EN_CN`` when possible.
        """
        x_labels, counts, series_names = [], [], []
        for key, count in data.items():
            label = LEVEL_EN_CN.get(key) or key
            counts.append(count or 0)
            x_labels.append(label)
            series_names.append(label)
        image = get_bar_charts_image(
            x=x_labels, yv_list=[counts, ], yn_list=series_names,
            layout_dict={"title": title or ""})
        return self.tpl.write_inline_image(image=image, width=Mm(93), height=Mm(66))

    def get_ssl_bar_image(self, data):
        """Render the grouped SSL severity bar chart (levels >= 2, highest
        severity first) and return it as an inline image."""
        ordered_levels = [lv for lv in sorted(LEVEL_NUM_STR.keys(), reverse=True) if lv >= 2]
        x_labels = [LEVEL_NUM_STR[lv]["cn"] for lv in ordered_levels]
        series, series_names = [], []
        for category, level_counts in data.items():
            series_names.append(WARNING_CATEGORY_KEY_LABEL_MAP["ssl"].get(category, category))
            series.append(
                [level_counts.get(LEVEL_NUM_STR[lv]["en"], 0) or 0 for lv in ordered_levels])
        color_list = ['rgba(91,170,249,100)', 'rgba(97,221,170,100)']
        image = get_bar_charts_image(
            x=x_labels, yv_list=series, yn_list=series_names, color_list=color_list,
            layout_dict={"title": "SSL安全数量"})
        return self.tpl.write_inline_image(image=image, width=Mm(93), height=Mm(66))

    def get_pie_image(self, data):
        """Render {label: count} as a pie chart and return it as an inline image."""
        legend = [f"{name} {count}" for name, count in data.items()]
        image = get_pie_charts_image(
            text=list(data.keys()), values=list(data.values()), labels=legend)
        return self.tpl.write_inline_image(image=image)

    @staticmethod
    def get_cn_category_data(task_type, data):
        """Re-key category counts with their Chinese labels (fallback: raw key)."""
        label_map = WARNING_CATEGORY_KEY_LABEL_MAP[task_type]
        return {label_map.get(key, key): value for key, value in data.items()}

    @staticmethod
    def get_available_line_image_data(data_list):
        """Reshape availability-task entries into line-chart series.

        Side effect: each entry's ``date_ratio`` is replaced in place by a
        {date-string: ratio} mapping.  Returns ``{"x", "yv_list", "yn_list"}``
        spanning every day between the earliest and latest date seen, or
        ``{}`` when ``data_list`` is empty.
        """
        if not data_list:
            return {}
        by_target = {}
        seen_dates = []
        for entry in data_list:
            date_ratio = entry.pop("date_ratio", {})
            seen_dates.extend(
                datetime.datetime.strptime(day, "%Y-%m-%d") for day in date_ratio["date"])
            entry.update({"date_ratio": {
                day: date_ratio["ratio"][i] for i, day in enumerate(date_ratio["date"])}})
            by_target[f"({entry['name']}){entry['target']}"] = entry
        first, last = min(seen_dates), max(seen_dates)
        date_axis = [
            (first + datetime.timedelta(days=offset)).strftime("%Y-%m-%d")
            for offset in range((last - first).days + 1)
        ]
        yv_list, yn_list = [], []
        for entry in by_target.values():
            ratios = entry["date_ratio"]
            # "" (empty string) tells the chart not to draw a point that day.
            yv_list.append(
                ["" if ratios.get(day) is None else ratios.get(day) for day in date_axis])
            yn_list.append(entry["name"])
        return {"x": date_axis, "yv_list": yv_list, "yn_list": yn_list}

    def get_available_line_image(self, data):
        """Render availability ratios as a percentage line chart, inline."""
        x_axis = data.get("x", [])
        yv_list = data.get("yv_list", [])
        yn_list = data.get("yn_list", [])
        tick_values = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110]
        layout = {
            "xaxis_nticks": min(len(x_axis), 6),
            "yaxis_showline": True,
            "yaxis_range": (0, 110),
            "yaxis_tickvals": tick_values,
            "yaxis_ticktext": [f"{v}%" for v in tick_values],
            "template": "simple_white",
            "xaxis_type": "date",
            "xaxis_tickformat": "%Y-%m-%d",
        }
        image = get_line_charts_image(
            x=x_axis,
            yv_list=yv_list,
            yn_list=yn_list,
            text=[[f"{value}%" for value in series] for series in yv_list],
            layout_dict=layout,
        )
        return self.tpl.write_inline_image(image=image, width=Mm(132), height=Mm(85))

    @staticmethod
    def get_asset_task_bar_data(data):
        """Flatten {key: {"cn": label, "count": n}} into {label: n}."""
        return {item["cn"]: item["count"] for item in data.values()}


class OneReportExportBase(_OneAndMutilCommon):
    """Builds the docx template context for a single job's report.

    Subclasses supply the template (``tpl``) and data source (``data_obj``);
    ``get_context`` assembles ``self.context`` and ``save`` renders it
    through the template.
    """

    def __init__(self, tpl: ReportExport, data_obj: OneJobData, *args, **kwargs):
        self.tpl = tpl
        self.source_data = data_obj
        # Template context.  parse_overview_data() rebuilds this from
        # scratch, so get_context() runs it before the other parse_* methods.
        self.context = {}

    def parse_ws_task_data(self):
        """Attach web-security stats and bar/pie charts to the context.

        Charts are only generated for sections whose "count" is non-zero;
        each section dict is mutated in place.  Returns the ws task data.
        """
        ws_task_data = self.source_data.export_ws_task_data()
        if ws_task_data.get("vul", {}).get("count"):
            vul_data = ws_task_data.get("vul", {})
            category_data = self.get_cn_category_data("vul", vul_data.get("category_count_map", {}))
            level_count_map = vul_data.get("level_count_map", {})
            # Re-key severity counts by Chinese label, highest level first,
            # keeping only levels >= 2.
            level_count_map = {LEVEL_NUM_STR[level]["cn"]: level_count_map.get(LEVEL_NUM_STR[level]["en"], 0)
                               for level in sorted(LEVEL_NUM_STR.keys(), reverse=True) if level >= 2}
            vul_data.update({
                "vul_bar": self.get_bar_image(data=level_count_map, title="Web漏洞数量"),
                "vul_pie": self.get_pie_image(data=category_data)
            })
        if ws_task_data.get("content", {}).get("count"):
            content_data = ws_task_data.get("content", {})
            category_data = self.get_cn_category_data("content", data=content_data.get("category_count_map", {}))
            content_data.update({
                "content_bar": self.get_bar_image(data=category_data, title="内容违规数量"),
                # Pie chart drops zero-count categories.
                "content_pie": self.get_pie_image(data={k: v for k, v in category_data.items() if v})
            })
        if ws_task_data.get("ssl", {}).get("count"):
            ssl_data = ws_task_data.get("ssl", {})
            category_data = self.get_cn_category_data("ssl", data=ssl_data.get("category_count_map", {}))
            ssl_data.update({
                "ssl_bar": self.get_ssl_bar_image(data=ssl_data.get("level_count_map", {})),
                "ssl_pie": self.get_pie_image(data=category_data)
            })
        if ws_task_data.get("security_event", {}).get("count"):
            security_event_data = ws_task_data.get("security_event", {})
            category_data = self.get_cn_category_data("securityEvent", data=security_event_data.get("category_count_map", {}))
            security_event_data.update({
                "security_event_bar": self.get_bar_image(data=category_data, title="安全事件数量"),
                "security_event_pie": self.get_pie_image(data={k: v for k, v in category_data.items() if v})
            })
        if ws_task_data.get(TASK_TYPE_CONFIG[TaskType.change_check.value]["line"], {}).get("count"):
            change_check_data = ws_task_data.get(TASK_TYPE_CONFIG[TaskType.change_check.value]["line"], {})
            category_data = self.get_cn_category_data(TaskType.change_check.value, data=change_check_data.get("category_count_map", {}))
            change_check_data.update({
                "change_check_bar": self.get_bar_image(data=category_data, title="篡改数量"),
            })
        self.context.update({"ws_task": ws_task_data})
        return ws_task_data

    def parse_asset_task_data(self):
        """Attach asset-change stats (plus a bar chart when anything changed)
        to the context.  Returns the asset task data."""
        asset_task_data = self.source_data.export_asset_task_data()
        if asset_task_data.get("has_changed_count"):
            bar_data = self.get_asset_task_bar_data(asset_task_data.get("change_op_count"))
            asset_task_data.update({"asset_bar": self.get_bar_image(data=bar_data, title="资产变动数量")})
        self.context.update({"asset_task": asset_task_data})
        return asset_task_data

    def parse_available_task_data(self):
        """Attach availability stats plus error-bar and HTTP/ping line charts
        to the context.  Returns the availability task data."""
        available_task_data = self.source_data.export_available_task_data()
        if available_task_data.get("has_error_target_count", {}).get("total"):
            category_data = {
                t: available_task_data.get("has_error_target_count", {}).get(t, 0) for t in AVAILABLE_TASK_TYPE
            }
            available_task_data.update({
                "available_bar": self.get_bar_image(data=category_data, title="可用性异常数量"),
            })
        if http_list := available_task_data.get("http_task_list", []):
            http_line_data = self.get_available_line_image_data(http_list)
            available_task_data.update({
                "http_line": self.get_available_line_image(http_line_data)
            })
        if ping_list := available_task_data.get("ping_task_list", []):
            ping_line_data = self.get_available_line_image_data(ping_list)
            available_task_data.update({
                "ping_line": self.get_available_line_image(ping_line_data)
            })
        self.context.update({"available_task": available_task_data})
        return available_task_data

    def parse_overview_data(self):
        """Rebuild the context with the report header fields (job info,
        report time, monitored range, overall job level)."""
        self.context = {
            "job_name": self.source_data.job.note,
            "target_url": self.source_data.job.targetUrl,
            "source_ip": self.source_data.job.sourceIp or "--",
            "report_time": datetime.datetime.now().strftime(DATETIME_FMT),
            "is_range": self.source_data.report_type == "range",
            "year": str(datetime.datetime.now().year)  # footer year
        }
        job_level, job_level_msg = self.source_data.get_job_level()
        self.context.update({
            "job_level": job_level,
            "job_level_msg": job_level_msg
        })
        start_time, end_time = self.source_data.get_export_time()
        self.context.update({
            "start_time": start_time.strftime(DATETIME_FMT),
            "end_time": end_time.strftime(DATETIME_FMT),
        })
        ws_last_time = self.source_data.ws_task_data.get("last_time")
        # Show URL details only when the latest ws scan postdates the spider
        # cutoff; with no scan time at all, default to showing them.
        self.context.update({
            "is_show_url": arrow.get(SPIDER_URL_TIME).datetime < arrow.get(ws_last_time).datetime if ws_last_time else True
        })

    def get_context(self):
        """Assemble and return the full template context."""
        self.parse_overview_data()
        self.parse_ws_task_data()
        self.parse_asset_task_data()
        self.parse_available_task_data()
        return self.context

    def save(self, is_file=False, path=None):
        """Render the report; see ``ReportExport.save`` for the return value."""
        return self.tpl.save(context=self.context, is_file=is_file, path=path)


class OneJobReportExport(OneReportExportBase):
    """Export one job's monitoring report from the single-asset docx template."""

    def __init__(self, job_id, report_type, start_time=None, end_time=None, **kwargs):
        tpl = ReportExport(template=os.path.join(BASE_DIR, "files/单资产ScanV监测报告模版.docx"))
        source_data = OneJobData(job_id, report_type, start_time=start_time, end_time=end_time)
        super(OneJobReportExport, self).__init__(tpl=tpl, data_obj=source_data)

    def single_export(self, celery_obj: CeleryTask = None, is_file=None, path=None):
        """Render the report.

        Without ``celery_obj`` the report is built synchronously and the
        result of ``save`` is returned.  With ``celery_obj`` each parsing
        step reports incremental progress and the finished document is
        persisted on the celery task; the task id is returned.
        """
        if not celery_obj:
            self.get_context()
            return self.save(is_file=is_file, path=path)
        # (step, progress-after-step) — order matches get_context().
        steps = (
            (self.parse_overview_data, 25),
            (self.parse_ws_task_data, 45),
            (self.parse_asset_task_data, 65),
            (self.parse_available_task_data, 80),
        )
        update_report_task_process(celery_obj=celery_obj, process=5)
        for step, progress in steps:
            step()
            update_report_task_process(celery_obj=celery_obj, process=progress)
        file_io = self.save()
        update_report_task_process(celery_obj=celery_obj, process=95)
        # Persist the finished document on the celery task record.
        filename = f'[{self.source_data.job.note}]ScanV安全监测报告.docx'
        update_report_task_process(celery_obj=celery_obj, process=100, file_io=file_io, filename=filename)
        return celery_obj.id


class OneTaskReportExport(OneReportExportBase):
    """Export one web-security task's report using the task-type template."""

    def __init__(self, task_id):
        task = Task.objects.filter(id=task_id).first()
        if not task or task.taskType not in WEB_SEC_TASK_TYPE:
            raise ParameterError(msg=f"{ task_id }任务不存在")
        # One docx template per web-security task type.
        templates = {
            "vul": "ScanV单次报告_WEB漏洞.docx",
            "ssl": "ScanV单次报告_SSL安全.docx",
            "securityEvent": "ScanV单次报告_安全事件.docx",
            "content": "ScanV单次报告_内容合规.docx",
            TaskType.change_check.value: "ScanV单次报告_篡改监测.docx",
        }
        template_path = os.path.join(BASE_DIR, f"files/{templates[task.taskType]}")
        tpl = ReportExport(template=template_path)
        source_data = OneJobData(job_id=task.jobId, report_type="latest", web_sec_task_query={"_id": task.id})
        super().__init__(tpl=tpl, data_obj=source_data)


class TaskGroupReportExport(OneReportExportBase):
    """Export the latest monitoring report for one asset-task group."""

    def __init__(self, asset_task_id):
        asset_task = AssetTasks.objects.filter(id=asset_task_id).first()
        if not asset_task:
            raise ParameterError(msg="任务不存在")
        tpl = ReportExport(template=os.path.join(BASE_DIR, "files/任务组最近一次监测报告模版.docx"))
        # Collect the non-empty result ids of every web-security task type.
        task_ids = [
            result_id for t in WEB_SEC_TASK_TYPE
            if (result_id := getattr(asset_task, f"{t}ResultId"))
        ]
        source_data = OneJobData(
            job_id=asset_task.jobId, report_type="latest",
            web_sec_task_query={"_id": {"$in": task_ids}})
        super().__init__(tpl=tpl, data_obj=source_data)


class MutilJobReportExport(_OneAndMutilCommon):
    """Merged multi-job report exporter.

    Produces a summary word document (rendered from ``self.context``) plus an
    excel workbook of detail rows (collected in ``self.excel_data``); the two
    files are zipped together by ``export``.
    """

    def __init__(self, job_ids, report_type, start_time=None, end_time=None, **kwargs):
        self.tpl = ReportExport(template=os.path.join(BASE_DIR, "files/多资产ScanV监测汇总报告模版.docx"))
        self.source_data = MutilJobData(job_ids, report_type, start_time=start_time, end_time=end_time)
        self.context = {
            "is_range": report_type == "range",
            "is_show_url": True,
            "report_time": datetime.datetime.now().strftime(DATETIME_FMT),
            "excel_name": "",
            "year": str(datetime.datetime.now().year)  # footer year
        }
        # Row buffers for the excel detail sheets, filled by parse_* methods.
        self.excel_data = {"bug_list": [], "security_list": [], "content_list": [],
                           "available_list": [], "asset_list": [], "change_check_list": []}

    @staticmethod
    def format_detail(details, length):
        """Prefix every row with a 1-based sequence number and truncate it to
        ``length`` columns."""
        return [[index + 1, ] + data[:length] for index, data in enumerate(details)]

    def report_range_time(self, start_time, end_time):
        """Widen the context's overall start/end range to cover this interval.

        A missing start or end time leaves the context untouched.
        """
        if not all([start_time, end_time]):
            return
        if (not self.context.get("start_time")) or self.context.get("start_time") > start_time:
            self.context.update({"start_time": start_time})
        if (not self.context.get("end_time")) or self.context.get("end_time") < end_time:
            self.context.update({"end_time": end_time})

    def parse_ws_task_data(self, export_detail=True):
        """Collect web-security stats/pie charts into the context and detail
        rows into ``self.excel_data``.  Returns the (mutated) ws task data."""
        ws_task_data = self.source_data.export_ws_task_data(export_detail)
        start_time = ws_task_data.get("start_time")
        end_time = ws_task_data.get("end_time")
        self.report_range_time(start_time, end_time)
        # Hide URL columns when the last scan predates the spider cutoff.
        if ((last_time := ws_task_data.pop("last_time", "")) and
                (arrow.get(SPIDER_URL_TIME).datetime > arrow.get(last_time).datetime)):
            self.context.update({"is_show_url": False})
        if bug_category_count_map := ws_task_data.pop("bug_category_count_map", {}):
            ws_task_data.update({"bug_category_pie": self.get_pie_image(bug_category_count_map)})
        if content_category_count_map := ws_task_data.pop("content_category_count_map", {}):
            content_category_count_map = self.get_cn_category_data("content", content_category_count_map)
            ws_task_data.update({"content_category_pie": self.get_pie_image(content_category_count_map)})
        if security_category_count_map := ws_task_data.pop("security_category_count_map", {}):
            security_category_count_map = self.get_cn_category_data("securityEvent", security_category_count_map)
            ws_task_data.update({"security_category_pie": self.get_pie_image(security_category_count_map)})
        if change_check_category_count_map := ws_task_data.pop("change_check_category_count_map", {}):
            change_check_category_count_map = self.get_cn_category_data("changeCheck", change_check_category_count_map)
            ws_task_data.update({"change_check_category_pie": self.get_pie_image(change_check_category_count_map)})
        if security_event_details := ws_task_data.pop("security_event_details", []):
            self.excel_data.update({"security_list": self.format_detail(security_event_details, 8)})
        if bug_details := ws_task_data.pop("bug_details", []):
            self.excel_data.update({"bug_list": self.format_detail(bug_details, 14)})
        if content_details := ws_task_data.pop("content_details", []):
            self.excel_data.update({"content_list": self.format_detail(content_details, 8)})
        # BUGFIX: was .get(), which (unlike every other detail list above)
        # left the raw change-check rows inside the word-template context.
        if change_check_details := ws_task_data.pop("change_check_details", []):
            self.excel_data.update({"change_check_list": self.format_detail(change_check_details, 11)})
        self.context.update({"ws_task_data": ws_task_data})
        return ws_task_data

    def parse_available_task_data(self, export_detail=True):
        """Collect availability stats/line charts into the context and detail
        rows into ``self.excel_data``.  Returns the availability data."""
        available_task_data = self.source_data.export_available_task_data(export_detail)
        start_time = available_task_data.get("start_time")
        end_time = available_task_data.get("end_time")
        self.report_range_time(start_time, end_time)
        if http_list := available_task_data.get("http_task_list", []):
            http_line_data = self.get_available_line_image_data(http_list)
            available_task_data.update({
                "http_line": self.get_available_line_image(http_line_data)
            })
        if ping_list := available_task_data.get("ping_task_list", []):
            ping_line_data = self.get_available_line_image_data(ping_list)
            available_task_data.update({
                "ping_line": self.get_available_line_image(ping_line_data)
            })
        if detail_list := available_task_data.pop("detail_list", []):
            self.excel_data.update({"available_list": self.format_detail(detail_list, 7)})
        self.context.update({"available_task_data": available_task_data})
        return available_task_data

    def parse_asset_task_data(self, export_detail=True):
        """Collect asset-change stats/pie chart into the context and detail
        rows into ``self.excel_data``.  Returns the asset task data."""
        asset_task_data = self.source_data.export_asset_task_data(export_detail)
        start_time = asset_task_data.get("start_time")
        end_time = asset_task_data.get("end_time")
        self.report_range_time(start_time, end_time)
        if change_type_map := asset_task_data.pop("change_type_map", {}):
            # Re-key change types with their Chinese labels for the pie chart.
            change_type_map_cn = {ASSET_CHANGE_TYPE_CN[key]: value
                                  for key, value in change_type_map.items()}
            asset_task_data.update({"asset_pie": self.get_pie_image(change_type_map_cn)})
        if detail_list := asset_task_data.pop("detail_list", []):
            self.excel_data.update({"asset_list": self.format_detail(detail_list, 6)})
        self.context.update({"asset_task_data": asset_task_data})
        return asset_task_data

    def parse_overview_data(self):
        """Fill the header fields (job count, formatted start/end times).

        Must run after the other parse_* methods, which populate the context
        sections and range it reads.
        """
        if job_list := self.context.get("ws_task_data", {}).get("job_list", []):
            self.context.update({"job_count": len(job_list)})
        elif job_count := self.context.get("available_task_data", {}).get("all_job_count", 0):
            self.context.update({"job_count": job_count})
        elif job_count := self.context.get("asset_task_data", {}).get("all_job_count", 0):
            self.context.update({"job_count": job_count})
        else:
            self.context.update({"job_count": 0})
        if start_time := self.context.get("start_time"):
            self.context.update({"start_time": start_time.strftime(DATETIME_FMT)})
        if end_time := self.context.get("end_time"):
            self.context.update({"end_time": end_time.strftime(DATETIME_FMT)})
        # Range report with no data at all: fall back to the requested range.
        if self.source_data.report_type == "range" and not self.context.get("start_time"):
            self.context.update({
                "start_time": arrow.get(self.source_data.start_time).datetime.strftime(DATETIME_FMT),
                "end_time": arrow.get(self.source_data.end_time).datetime.strftime(DATETIME_FMT),
            })

    def get_context(self, ws_task_detail=True, asset_task_detail=True, available_task_detail=True):
        """Assemble and return the full word-template context."""
        self.parse_ws_task_data(ws_task_detail)
        self.parse_asset_task_data(asset_task_detail)
        self.parse_available_task_data(available_task_detail)
        self.parse_overview_data()
        return self.context

    def export_excel(self, filename):
        """Write the collected detail rows to an excel workbook at ``filename``."""
        bug_header = ["序号", "漏洞名称", "威胁等级", "漏洞类型", "风险地址", "影响资产地址", "源站IP", "资产名称",
                      "最新发现时间", "请求内容", "响应内容", "Payload", "漏洞详情", "漏洞描述", "修复建议"]
        content_header = ["序号", "内容违规名称", "威胁等级", "风险地址", "影响资产地址", "源站IP", "资产名称",
                          "最新发现时间", "详情"]
        security_header = ["序号", "安全事件名称", "威胁等级", "风险地址", "影响资产地址", "源站IP", "资产名称",
                           "最新发现时间", "详情"]
        change_check_header = ["序号", "变更类型", "威胁等级", "风险地址", "影响资产地址", "源站IP", "资产名称",
                               "最新发现时间", "变更状态", "变更比例/变更资源数", "样本详情", "当前详情"]
        available_header = ["序号", "监测类型", "监测目标", "影响资产地址", "源站IP", "资产名称", "最新发现时间", "详情"]
        asset_header = ["序号", "变动类型", "资产地址", "源站IP", "资产名称", "最新发现时间", "详情"]
        worker_excel = openpyxl.Workbook()
        active = 0
        # write_sheet returns (sheet, next_active_index); the sheet itself is
        # not needed here.
        _, active = write_sheet(worker_excel, active, "漏洞", bug_header, self.excel_data["bug_list"])
        _, active = write_sheet(worker_excel, active, "安全事件", security_header, self.excel_data["security_list"])
        _, active = write_sheet(worker_excel, active, "内容违规", content_header, self.excel_data["content_list"])
        _, active = write_sheet(worker_excel, active, "篡改", change_check_header, self.excel_data["change_check_list"])
        _, active = write_sheet(worker_excel, active, "可用性", available_header, self.excel_data["available_list"])
        write_sheet(worker_excel, active, "资产变动", asset_header, self.excel_data["asset_list"])
        worker_excel.save(filename)

    def export_word(self, path):
        """Render the summary word document to ``path``."""
        return self.tpl.save(self.context, is_file=True, path=path)

    def export(self, celery_obj: CeleryTask = None):
        """Build the word + excel pair and bundle them into a zip.

        With ``celery_obj`` progress is reported and the archive is stored on
        the celery task (returns its id); otherwise the zip path is returned.
        The temporary working directory is removed in both cases.
        """
        dir_path = get_report_dir(task_id=str(celery_obj.id) if celery_obj else f"report_tmp_{int(time.time() * 1000)}")
        time_str = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        filename = f"ScanV安全监测报告{time_str}.zip"
        excel_filename = f"监测结果明细{time_str}.xlsx"
        word_filename = f"ScanV安全监测报告{time_str}.docx"
        excel_file_path = os.path.join(dir_path, excel_filename)
        word_file_path = os.path.join(dir_path, word_filename)
        self.context.update({"excel_name": excel_filename})
        if celery_obj:
            update_report_task_process(celery_obj=celery_obj, process=5)

            self.parse_ws_task_data()
            update_report_task_process(celery_obj=celery_obj, process=27)

            self.parse_asset_task_data()
            update_report_task_process(celery_obj=celery_obj, process=43)

            self.parse_available_task_data()
            update_report_task_process(celery_obj=celery_obj, process=68)

            self.parse_overview_data()
            update_report_task_process(celery_obj=celery_obj, process=77)

            self.export_excel(excel_file_path)
            update_report_task_process(celery_obj=celery_obj, process=85)

            self.export_word(word_file_path)
            update_report_task_process(celery_obj=celery_obj, process=97)

            file_io = export_zip([excel_file_path, word_file_path])
            # Persist the finished archive on the celery task record.
            update_report_task_process(celery_obj=celery_obj, process=100, file_io=file_io, filename=filename)
            shutil.rmtree(dir_path)
            return celery_obj.id
        else:
            self.get_context()
            self.export_excel(excel_file_path)
            self.export_word(path=word_file_path)
            zip_path = os.path.join(os.path.dirname(dir_path), filename)
            export_zip([excel_file_path, word_file_path], zip_path)
            shutil.rmtree(dir_path)
            return zip_path


def _export_zip_report(celery_obj, job_ids, report_type, start_time, end_time):
    """Generate one word report per job and bundle them into a zip.

    Without ``celery_obj`` the zip path is returned; with it, progress is
    reported per job and the archive is stored on the celery task (its id is
    returned).  The temporary directory is removed in both cases.
    """
    tmp_dir = get_report_dir(
        task_id=str(celery_obj.id) if celery_obj else f"report_tmp_{int(time.time() * 1000)}")
    jobs = Job.objects.filter(id__in=job_ids)
    total = jobs.count()
    done = 0
    job_names = []
    word_paths = []
    update_report_task_process(celery_obj, process=5)
    for job in jobs:
        job_names.append(job.note)
        exporter = OneJobReportExport(
            job_id=job.id, report_type=report_type, start_time=start_time, end_time=end_time
        )
        _, word_path = check_report_filename(job.note, job.target.host, tmp_dir)
        exporter.get_context()
        exporter.save(is_file=True, path=word_path)
        done += 1
        word_paths.append(word_path)
        if celery_obj:
            # Report generation accounts for 90% of the overall progress.
            update_report_task_process(celery_obj, process=int(round(done / total, 2) * 100 * 0.9))
    zip_name = f"ScanV安全监测报告{datetime.datetime.now().strftime('%Y%m%d%H%M%S')}.zip"
    if not celery_obj:
        zip_path = os.path.join(os.path.dirname(tmp_dir), zip_name)
        export_zip(word_paths, path=zip_path)
        shutil.rmtree(tmp_dir)
        return zip_path
    # Persist the archive on the celery task record, then clean up.
    file_io = export_zip(file_list=word_paths)
    update_report_task_process(celery_obj, process=100, file_io=file_io, filename=zip_name)
    shutil.rmtree(tmp_dir)
    return celery_obj.id


def export_port(
        task_obj_id=None,
        asset_task_id=None,
        celery_task_id=None,
        job_ids: list = None,
        report_type=None,
        download_type=None,
        start_time=None,
        end_time=None,
        is_file=False,
        path=None,
        file_type=None
):
    """Report-export entry point.

    ``task_obj_id``: export one monitoring task's report (tasks.id),
    synchronous.  ``asset_task_id``: export one task group's latest report,
    synchronous.  ``celery_task_id``: asynchronous asset-report export
    (celery_task.id); the remaining parameters are then read from the celery
    task's stored params.  Raises ``ParameterError`` on invalid input.
    """
    # Single monitoring-task report — rendered and returned immediately.
    if task_obj_id:
        exporter = OneTaskReportExport(task_id=task_obj_id)
        exporter.get_context()
        return exporter.save()
    # Task-group report — rendered and returned immediately.
    if asset_task_id:
        exporter = TaskGroupReportExport(asset_task_id=asset_task_id)
        exporter.get_context()
        return exporter.save()
    celery_obj = None
    if celery_task_id:
        celery_obj = CeleryTask.objects.filter(id=celery_task_id, task_type="report_download").first()
        if not celery_obj:
            raise ParameterError(msg="报告导出任务不存在")
        params = celery_obj.params.to_dict()
        job_ids = {item["job_id"] for item in params.get("job_ids", []) if item.get("job_id")}
        report_type = params.get("report_type")
        download_type = params.get("download_type")
        start_time = params.get("start_date")
        end_time = params.get("end_date")
    if not job_ids:
        raise ParameterError(msg="没有提供需要导出报告的资产")
    job_ids = list(job_ids)
    if report_type not in ["range", "latest"]:
        raise ParameterError(msg="报告导出时间选择错误")
    if report_type == "range" and not all([start_time, end_time]):
        raise ParameterError(msg="报告导出时间选择错误")
    if download_type not in ["0", "1", 0, 1]:
        raise ParameterError(msg="报告导出类型错误")
    download_type = int(download_type)
    if download_type == 0:
        # download_type == 0: merge every job into one combined report.
        exporter = MutilJobReportExport(job_ids, report_type, start_time, end_time)
        return exporter.export(celery_obj=celery_obj)
    if len(job_ids) == 1 and file_type == "docx":
        # One asset with an explicit word-file request.
        exporter = OneJobReportExport(
            job_id=job_ids[0], report_type=report_type, start_time=start_time, end_time=end_time)
        return exporter.single_export(celery_obj=celery_obj, is_file=is_file, path=path)
    # Several assets: one report each, bundled into a zip.
    return _export_zip_report(celery_obj, job_ids, report_type, start_time, end_time)
