import logging
import re

from bson import ObjectId

from app.celery.handle_result.utils import get_warning_id
from app.db.models.asset_tasks import get_monitor_urls
from app.db.models.inspection import IgnoreStrategies
from app.db.models.jobs import Job
from app.db.models.tasks import IgnoreWarning, Task
from app.db.models.wass import ScanEvents
from app.libs.enums import InspectionStrategyIgnoreType, TaskType
from app.libs.ipv6_check import IPv6Score, ipv6_support_check
from app.libs.utility import filter_port_from_url


logger = logging.getLogger()

WAF_ERROR_HTTP_CODE = [401, 403, 407, 419, 429, 451, 503]
NOT_FIND_HTTP_CODE = [404, 500, 502, 504]


class Event(object):
    """Aggregate the scan events of one task session and normalize them
    into the result structure expected by each task type.

    The constructor loads all ``ScanEvents`` for ``task_session_id``
    (dropping events matched by false-positive ignore strategies) and
    resolves the parser method for ``task_type``; ``get_event_info``
    then runs that parser and attaches the target ip.
    """

    def __init__(
        self, task_session_id, session_type, task_type, job_id="", plugins=None
    ):
        """
        :param task_session_id: id of the scan task session to load.
        :param session_type: accepted for interface compatibility; not
            used by this class itself.
        :param task_type: selects one of the ``_parse_*`` handlers.
        :param job_id: owning job id, enables warning-ignore lookups.
        :param plugins: enabled plugin names; filters security-event
            output.
        """
        self.job_id = job_id

        self.plugins = plugins if plugins else []
        # Dispatch table: task type -> bound parser method.
        task_func = {
            "ping": self._parse_ping,
            "http": self._parse_http,
            "securityEvent": self._parse_security_event,
            "asset": self._parse_asset,
            "vul": self._parse_vul,
            "ssl": self._parse_ssl,
            "content": self._parse_content,
            "hostVul": self._parse_vul_host,
            "ipv6": self._parse_ipv6,
            TaskType.change_check.value: self._parse_change_check,
        }
        self.get_event_func = task_func.get(task_type)
        self._task_session_id = task_session_id
        self.ignore_strategies = {}
        # All warning ids that were already ignored for this asset.
        self.ignore_warning_ids = self._get_job_ignore_warnings(task_type)
        self.events = self._load_events(
            task_session_id, ["site_info", "statistics"]
        )

    def _load_events(self, task_session_id, event_names):
        """Load the session's scan events as plain dicts.

        Events whose name is NOT in ``event_names`` are filtered through
        the ignore strategies and capped at 2000; the named summary
        events (``site_info`` / ``statistics``) are always kept.
        """
        events = [
            item.to_mongo().to_dict()
            for item in ScanEvents.objects.filter(
                task_session_id=task_session_id, event_name__nin=event_names
            ).limit(2000)
            if not self._filter(item)
        ]

        statistics_events = [
            item.to_mongo().to_dict()
            for item in ScanEvents.objects.filter(
                task_session_id=task_session_id, event_name__in=event_names
            )
        ]
        # A task may emit several "statistics" events with inconsistent
        # url counts, which could make the frontend display 0; normalize
        # every statistics event to the maximum observed url count.
        statistics_list = []
        urls_list = [0, ]  # seed with 0 so max() never sees an empty list
        for statistics in statistics_events:
            if statistics["event_name"] == "statistics" and "urls" in statistics["detail"]:
                urls_list.append(statistics["detail"].get("urls", 0))
                statistics_list.append(statistics)
        max_urls = max(urls_list)
        for statistics in statistics_list:
            statistics["detail"]["urls"] = max_urls
        return events + statistics_events

    def _filter(self, event) -> bool:
        """Return True when the event matches a false-positive ignore
        strategy (matched on the ``key + "," + url`` pair)."""
        key = event.detail.get("key", "")
        # One target_url can cover many concrete urls, so all related
        # ignore strategies are collected by fuzzy-matching target_url.
        target_url = event.target_url
        url = event.detail.get("url", "")
        # Strategies are loaded lazily, once, from the first event seen.
        if not self.ignore_strategies:
            self._load_ignore_strategies(target_url)
        if key + "," + url in self.ignore_strategies["keys"]:
            return True
        return False

    def _load_ignore_strategies(self, url):
        """Cache the "key,url" pairs of all false-positive ignore
        strategies whose stored url fuzzily matches ``url``."""
        url = url.replace("http://", "").replace("https://", "")
        if url.endswith("/"):
            url = url[:-1]
        # re.escape keeps regex metacharacters in the URL (".", "+",
        # "?", ...) from breaking the $regex pattern or over-matching
        # unrelated strategies.
        ignore_strategies = IgnoreStrategies.objects.filter(
            **{
                "url": {"$regex": f".*{re.escape(url)}.*"},
                "ignoreType": InspectionStrategyIgnoreType.FALSE_POSITIVE.value,
            }
        )
        keys = [
            ignore_strategy.key + "," + ignore_strategy.url
            for ignore_strategy in ignore_strategies
        ]
        self.ignore_strategies["keys"] = keys

    def _get_job_ignore_warnings(self, task_type):
        """Return a mapping of ignored warning id -> ignored time.

        Only warning-producing task types perform the lookup; any other
        type, or a missing job_id, yields an empty mapping.
        """
        if (task_type not in ["vul", "ssl", "securityEvent", "content"]) or (not self.job_id):
            return {}
        return {w.detail.id: w.ignoredTime for w in IgnoreWarning.objects.filter(jobId=self.job_id)}

    def get_target_ip(self):
        """Fetch the session's target_ip directly from the raw
        ``scan_task_sessions`` collection; "" when not found."""
        db = ScanEvents._get_db()
        scan_task_sessions = db["scan_task_sessions"]
        task_session_obj = (
            scan_task_sessions.find_one(
                {"task_session_id": self._task_session_id}, {"target_ip": 1}
            )
            or {}
        )
        return task_session_obj.get("target_ip", "")

    def get_event_info(self):
        """Run the task-type parser and attach the target ip.

        The ip is stored under ``data["addition"]["target_ip"]`` when
        the parser produced an "addition" section, otherwise at the top
        level of the result.
        """
        if self.get_event_func:
            data = self.get_event_func()
        else:
            data = {}
        if addition := data.get("addition", {}):
            addition["target_ip"] = self.get_target_ip()
        else:
            data.update({"target_ip": self.get_target_ip()})
        return data

    @staticmethod
    def _target_status(detail):
        """Classify the target's availability from a site_info detail.

        Returns {"status", "httpGetSequence", "detailPing", "tracePath"}
        or {} when the detail lacks the required probe data. Possible
        statuses: good, http_waf_error, http_code_error,
        http_status_error, ping_error.
        """
        if (not detail) or not isinstance(detail, dict):
            return {}

        http_get_sequence_dict = detail.get("http_get_sequence", {})
        detail_ping = detail.get("detail_ping", {})
        trace_path = detail.get("tracepath", {})
        if not all([http_get_sequence_dict, detail_ping]):
            return {}
        # assumes detail_ping always carries "loss_rate" as a 0..1
        # fraction -- TODO confirm against the probe schema
        detail_ping["loss_rate"] = int(detail_ping["loss_rate"] * 100)
        network_type = http_get_sequence_dict.get("network_type", 0)
        # network_type == 1 selects the ipv4 result set, otherwise ipv6.
        http_get_sequence = http_get_sequence_dict.get("ipv4_result", []) if network_type == 1 \
            else http_get_sequence_dict.get("ipv6_result", [])
        if not http_get_sequence:
            return {}
        # Requests that errored at transport level or returned a
        # WAF-block / not-found status code.
        error_http_get_sequence = [
            _ for _ in http_get_sequence if
            _.get("exception", {}).get("exception_type") != 0 or
            _.get("status") in WAF_ERROR_HTTP_CODE + NOT_FIND_HTTP_CODE
        ]
        if not error_http_get_sequence:
            http_get = http_get_sequence[-1]
        else:
            # Prefer the latest WAF-blocked request, then the latest
            # not-found one, then the latest generic error.
            http_get = error_http_get_sequence[-1]
            waf_error_http_get = [_ for _ in error_http_get_sequence if _.get("status") in WAF_ERROR_HTTP_CODE]
            not_find_http_get = [_ for _ in error_http_get_sequence if _.get("status") in NOT_FIND_HTTP_CODE]
            if waf_error_http_get:
                http_get = waf_error_http_get[-1]
            elif not_find_http_get:
                http_get = not_find_http_get[-1]
        # Cloud-side probe evidence, used to disambiguate WAF blocking
        # from a genuinely unreachable target.
        cloud = detail.get("cloud", {})
        cloud_http = {}
        if _cloud_http := cloud.get("detail_http", []):
            cloud_http = _cloud_http[0]
        cloud_ping = {}
        if (_cloud_detail_ping := cloud.get("detail_ping", [])) and \
                _cloud_detail_ping[0].get("ip"):
            cloud_ping = _cloud_detail_ping[0]
        elif (_cloud_detail_ping_v6 := cloud.get("detail_ping_v6", [])) and \
                _cloud_detail_ping_v6[0].get("ip"):
            cloud_ping = _cloud_detail_ping_v6[0]
        if cloud_ping:
            cloud_ping["loss_rate"] = int(cloud_ping["loss_rate"] * 100)
        status = "good"
        is_http_exception = http_get.get("exception", {}).get("exception_type") != 0 or \
                            http_get.get("status") in WAF_ERROR_HTTP_CODE + NOT_FIND_HTTP_CODE
        if is_http_exception and http_get.get("status") in WAF_ERROR_HTTP_CODE:
            status = "http_waf_error"
            http_get["exception"]["content"] = http_get["response"]["content"]
        elif is_http_exception and http_get.get("status") in NOT_FIND_HTTP_CODE:
            status = "http_code_error"
            http_get["exception"]["content"] = http_get["response"]["content"]
        elif is_http_exception and http_get.get("exception", {}).get("exception_type") != 1:
            status = "http_status_error"
        elif is_http_exception and int(detail_ping.get("loss_rate", 0)) != 100:
            # Local ping still succeeds -> blocked only at the HTTP layer.
            status = "http_waf_error"
        elif is_http_exception and (cloud_http and not cloud_http.get("has_error")):
            status = "http_waf_error"
        elif is_http_exception and (cloud_ping and cloud_ping.get("loss_rate") != 100):
            status = "http_waf_error"
        elif is_http_exception and (cloud_ping and cloud_ping.get("loss_rate") == 100):
            status = "ping_error"
        elif is_http_exception:
            status = "http_status_error"
        # On HTTP exception drop the default response payload; otherwise
        # drop the (empty-meaning) exception payload.
        if is_http_exception:
            http_get["response"] = {}
        else:
            http_get["exception"] = {}
        target_status = {
            "status": status,
            "httpGetSequence": http_get,
            "detailPing": detail_ping,
            "tracePath": trace_path
        }
        return target_status

    @staticmethod
    def _parse_vul_host():
        """Host vulnerability results need no event parsing here."""
        return {}

    def _parse_ping(self, kw="ping"):
        """Collect cloud probe results whose key contains ``kw`` from
        the first event, stripping the ``_{kw}`` suffix from each key.

        Returns {} when there are no events at all.
        """
        data = {}
        if self.events:
            for k, v in (
                self.events[0].get("detail", {}).get("cloud", {}).items()
            ):
                if kw in k:
                    data[k.replace(f"_{kw}", "")] = v
            data["probe_type"] = kw.upper()

        return data

    def _parse_http(self):
        """HTTP probe parsing reuses the ping logic with kw="http"."""
        return self._parse_ping(kw="http")

    def _parse_security_event(self):
        """Group security events by name, honoring the enabled plugins.

        ``site_info``/``statistics`` stay single dicts (nscan statistics
        take precedence over appscan's); risky/foreign links are folded
        into "foreign_links"; everything else accumulates per-name lists.
        """
        data = {}
        for event in self.events:
            _event_name = event.get("event_name")
            # prefix event name
            if _event_name == "cryptojacking":
                _event_name = "cryjack"

            _detail = event.get("detail", {})
            if _event_name == "site_info":
                data[_event_name] = _detail
                continue

            if _event_name == "statistics":

                if "broken_links" in self.plugins and event.get("job_name") == "nscan":
                    data.update({"broken_links": _detail.get("broken_links", [])})

                # nscan.statistics covered appscan.statistics
                if event.get("job_name") == "nscan":
                    data[_event_name] = _detail
                    continue
                elif event.get("job_name") != "nscan" and data.get(
                    _event_name
                ):
                    continue
                data[_event_name] = _detail
                continue

            _detail["found_at"] = event.get("created_at")
            if _event_name == "risk_link":
                # todo:
                if not any([_ for _ in self.plugins if "foreign_links" in _]):
                    continue
                _item = data.setdefault("foreign_links", [])
                _item.append(_detail)
            elif (
                _detail.get("foreign_depth", 0) >= 1
                and len(_ref_path := _detail.get("refer_path", [])) > 1
            ):
                if "foreign_links" not in self.plugins:
                    continue
                _detail["key"] = _event_name
                _detail["risk_url"] = filter_port_from_url(_detail.get("url"))
                # The second-to-last hop of the referrer path is the page
                # that actually links out.
                _detail["url"] = filter_port_from_url(_ref_path[-2])
                _item = data.setdefault("foreign_links", [])
                _item.append(_detail)
            else:
                if _event_name not in self.plugins:
                    continue
                _item = data.setdefault(_event_name, [])
                _item.append(_detail)

        return data

    def _parse_content(self):
        """Build content-monitoring warnings (sensitive keywords and
        privacy disclosures) together with their statistics."""
        data = {
            "warningCount": 0,
            "warningResolvedCount": 0,
            "warningUnresolvedCount": 0,
            "stats": {},
            "addition": {"statistics": {"urls": 0}},
        }
        warnings = {}
        # The ignored-warning map is loop-invariant: fetch the job once
        # instead of re-querying it for every event, and tolerate a
        # missing job document (empty map instead of AttributeError).
        job = None
        if self.events:
            job = Job.objects().filter(pk=self.job_id).only("ignoredWarnIdMap").first()
        ignored_warn_id_map = job.ignoredWarnIdMap if job else {}
        for event in self.events:
            _event_name = event.get("event_name")
            _detail = event.get("detail", {})
            # URL statistics for this session.
            if _event_name == "statistics":
                data["addition"]["statistics"]["urls"] = len(get_monitor_urls(
                    task_session_id=self._task_session_id, is_source_data=True))
                data["addition"]["statistics"]["scan_count"] = get_monitor_urls(
                    task_session_id=self._task_session_id, is_scan_count=True, is_source_data=True)
            # Defense info (WAF list) and target availability.
            elif _event_name == "site_info":
                data["addition"]["site_info"] = {
                    "waf_list": _detail.get("waf_list", [])
                }
                if target_status := self._target_status(_detail):
                    data["targetStatus"] = target_status
            # Sensitive keywords.
            elif _event_name == "keyword":
                if not (url := _detail.get("url")):
                    continue
                warning_id = get_warning_id("content", self.job_id, url, "keyword")
                # Already ignored via the frontend ignore feature: skip.
                if self.ignore_warning_ids.get(warning_id):
                    continue
                # Aggregate per (category, url).
                warning = warnings.setdefault(
                    "keyword" + url,
                    {
                        "category": "keyword",
                        "id": warning_id,
                        "level": 1,
                        "title": "敏感词",
                        "affects": url,
                        "detail": {},
                        "_id": ObjectId(),
                    },
                )
                # Ignored via the backend mark-state feature: record time.
                if ignored_time := ignored_warn_id_map.get(warning.get("id")):
                    warning["ignoredTime"] = ignored_time

                for result in _detail.get("results", []):
                    warning["detail"].setdefault(
                        str(result.get("type", 8)), []
                    ).append(result.get("word", []))
                warning.setdefault("addition", {})
                warning["addition"]["results"] = _detail.get("results", [])
            # Privacy disclosures.
            elif _event_name == "privacy_disclosure":
                if not (url := _detail.get("url")):
                    continue
                warning_id = get_warning_id("content", self.job_id, url, "privacy_disclosure")
                # Already ignored via the frontend ignore feature: skip.
                if self.ignore_warning_ids.get(warning_id):
                    continue
                warning = warnings.setdefault(
                    "privacy_disclosure" + url,
                    {
                        "category": "privacy_disclosure",
                        "id": warning_id,
                        "level": 1,
                        "title": "隐私信息",
                        "affects": url,
                        "detail": {},
                        "_id": ObjectId(),
                    },
                )

                # Ignored via the backend mark-state feature: record time.
                if ignored_time := ignored_warn_id_map.get(warning.get("id")):
                    warning["ignoredTime"] = ignored_time

                for result in _detail.get("results", []):
                    warning["detail"].setdefault(
                        str(result.get("type")), []
                    ).append(result.get("content", ""))
                # Keep raw results for evidence/forensics support.
                warning.setdefault("addition", {})
                warning["addition"]["results"] = _detail.get("results", [])

        warning_count = 0
        data["warnings"] = list(warnings.values())

        # Report statistics, stored under result.stats:
        # url               number of risky URLs
        # keyword           number of sensitive-word URLs
        # privacyDisclosure number of privacy-disclosure URLs
        # contentChange     number of changed URLs
        # privacyDisclosure<type>  e.g. privacyDisclosure0 => unknown
        # keyword<type>            e.g. keyword1 => phone number
        stats = data["stats"]
        for warning in warnings.values():
            if not warning.get("ignoredTime", None):
                warning_count += 1
            category = warning.get("category")
            # Number of risky URLs overall.
            stats["url"] = stats.setdefault("url", 0) + 1
            # URL count per category.
            stats[category] = stats.setdefault(category, 0) + 1
            for _type, value in warning.get("detail", {}).items():
                # Word/content count per category+type.
                stats[category + _type] = int(
                    stats.setdefault(category + _type, 0)
                ) + len(value)
        data["warningCount"] = warning_count
        return data

    def _parse_asset(self):
        """Asset result is simply the detail of the first event."""
        data = {}
        if self.events:
            data = self.events[0].get("detail", {})

        return data

    def _parse_vul(self):
        """Group vul events: statistics/site_info stay single dicts,
        every other event name accumulates a list of details."""
        data = {}
        for event in self.events:
            _event_name = event.get("event_name")
            _detail = event.get("detail", {})
            if _event_name == "statistics":
                data[_event_name] = _detail
            elif _event_name == "site_info":
                data[_event_name] = _detail
            else:
                _item = data.setdefault(_event_name, [])
                _item.append(_detail)

        return data

    def _parse_ssl(self):
        """Build the SSL result.

        Without nscan events the first event's detail is returned
        directly; with nscan, plugin findings prefixed "ssl_check" are
        normalized into ``data["sslcheck"]["results"]``, classified as
        certificate vs protocol issues.
        """
        is_nscan = False
        if "nscan" in [e.get("job_name") for e in self.events]:
            is_nscan = True
        if self.events and not is_nscan:
            data = self.events[0].get("detail", {})
            data["found_at"] = self.events[0].get("created_at")
            return data
        data = {}
        results = []
        ssl_check = {}
        # These source ids are certificate problems; every other
        # ssl_check finding counts as a protocol problem.
        ssl_certificate_errors = [
            "ssl_check_sslexpire",
            "ssl_check_sslhostmismatch",
            "ssl_check_sslrevoke",
        ]
        if self.events and is_nscan:
            for event in self.events:
                if event.get("job_name") == "site_info":
                    data = event.get("detail", {})
                    data["found_at"] = event.get("created_at")
                elif (
                    event.get("job_name") == "nscan"
                    and event.get("event_name") == "statistics"
                ):
                    ssl_check = event.get("detail", {}).get("ssl_check", {})
                elif (
                    event.get("detail", {})
                    .get("source_id", "")
                    .startswith("ssl_check")
                ):
                    detail = event.get("detail", {})
                    source_id = detail.get("source_id")
                    category = (
                        "certificate"
                        if source_id in ssl_certificate_errors
                        else "protocol"
                    )
                    results.append(
                        {
                            "detail": detail.get("extra_detail"),
                            "risk": event.get("severity"),
                            "report_type": detail.get("report_type", ""),
                            "cwe": ",".join(detail.get("cwe", [])),
                            "parameter_value": detail.get(
                                "parameter_value", ""
                            ),
                            "postdata": detail.get("postdata", ""),
                            "from_type": "plugin",
                            "impact": detail.get("impact", {}).get(
                                "zh_cn", ""
                            ),
                            "cnnvd": ",".join(detail.get("cnnvd", [])),
                            "bid": detail.get("bid", ""),
                            "category": category,
                            "place": detail.get("place", ""),
                            "cve": ",".join(detail.get("cve", [])),
                            "update_date": event.get("updated_at").strftime(
                                "%Y%m%d"
                            ),
                            "ssvid": detail.get("ssvid", ""),
                            "description": detail.get("description", {}).get(
                                "zh_cn", ""
                            ),
                            "extra_detail": detail.get("extra_detail"),
                            # detail may lack "updated_at" (the original
                            # code crashed on None); fall back to the
                            # event timestamp used for update_date.
                            "release_date": (
                                detail.get("updated_at")
                                or event.get("updated_at")
                            ).strftime(
                                "%Y%m%d"
                            ),
                            "url": detail.get("url"),
                            "cnvd": ",".join(detail.get("cnvd", [])),
                            "httpmethod": detail.get("httpmethod", ""),
                            "parameter": detail.get("parameter", ""),
                            "tags": detail.get("tags", ""),
                            "display_name": detail.get("name", {}).get(
                                "zh_cn", ""
                            ),
                            "recommendation": detail.get(
                                "recommendation", {}
                            ).get("zh_cn", ""),
                            "affects": detail.get("affect", ""),
                            "name": detail.get("name", {}).get("en_us", ""),
                            "reference": ",".join(detail.get("reference", [])),
                            "payload": detail.get("payload", ""),
                        }
                    )
            ssl_check["results"] = results
            data["sslcheck"] = ssl_check
        return data

    def _parse_ipv6(self):
        """Score IPv6 support from the first event's detail."""
        detail = self.events[0].get("detail", {}) if self.events else {}
        is_support_ipv6 = ipv6_support_check(detail)
        score_obj = IPv6Score(detail)
        score = score_obj.total_score
        return {"is_support_ipv6": is_support_ipv6, "score": score}

    def _parse_change_check(self):
        """Collect change_check details into a list; site_info and
        statistics stay single dicts."""
        data = {"change_check": [], "site_info": {}}
        for event in self.events:
            _event_name = event.get("event_name")
            _detail = event.get("detail", {})
            if _event_name == "change_check":
                data["change_check"].append(_detail)
            elif _event_name == "site_info":
                data[_event_name] = _detail
            elif _event_name == "statistics":
                data[_event_name] = _detail
        return data
