from urllib import parse
import copy
from collections import defaultdict
from app.db.models.asset_tasks import get_monitor_urls
from app.libs.enums import FOREIGN_LINKS_PLUGIN_TUPLE
from app.libs.display import WARNING_CATEGORY_KEY_LABEL_MAP, DESCRIPTION_MAP, CATEGORY_MAP

from . import BaseHandler
from .ssl import VUL_LEVEL_MAP
from .utils import get_warning_id
from .events import Event

# Maps the scanner's numeric ``risk_type`` code to the string key stored on
# each foreign-link result (consumed by
# SecurityEventHandler.parse_foreign_links when grouping entries; "icp" marks
# unregistered-domain links — see the 未备案 check in that method).
RiskTypeMapping = {
    1: "icp",
    2: "keyword",
    3: "domain",
    4: "seo",
}


class SecurityEventHandler(BaseHandler):
    """Turn a raw security-scan response into warning records.

    Every top-level key of the response that has a matching ``parse_<key>``
    method is parsed into a list of warning dicts; ``_handle_security`` then
    wraps the collected warnings together with task-level statistics.
    """

    def handle(self):
        """Entry point used by the handler framework."""
        return self._handle_security(response=self.response, target=self.target)

    def _handle_security(self, response, target):
        """Dispatch each response section to its ``parse_<key>`` parser.

        :param response: raw scan result keyed by category name.
        :param target: monitored site URL prefix used to classify links.
        :return: dict with ``warnings``, an ``addition`` block (monitor-URL
            statistics, WAF list, target IP) and, when derivable from the
            site info, a ``targetStatus`` entry.
        """
        warnings = []
        for key, detail in response.items():
            # Sections without a parse_<key> method are intentionally skipped.
            parse_func = getattr(self, f"parse_{key}", None)
            if parse_func is not None:
                warnings.extend(parse_func(target=target, details=detail))

        warnings_addition = {
            "addition": {
                "statistics": {
                    "urls": len(get_monitor_urls(task_session_id=self.task.taskSessionId, is_source_data=True)),
                    "scan_count": get_monitor_urls(
                        task_session_id=self.task.taskSessionId, is_scan_count=True, is_source_data=True),
                },
                "site_info": {
                    "waf_list": response.get("site_info", {}).get("waf_list", [])
                },
                "target_ip": response.get("target_ip", "")
            },
            "warnings": warnings
        }

        if target_status := Event._target_status(response.get("site_info", {})):
            warnings_addition["targetStatus"] = target_status
        return warnings_addition

    def _affects_with_fallback(self, detail, target):
        """Resolve affected/outbound URLs from the refer path.

        When no outbound URL can be derived from the refer chain, fall back
        to the detail's own URL for both ``affects`` and ``addition.outURL``.
        """
        affects_info = self._get_affects_url(detail.get("refer_path"), target)
        if not affects_info.get("addition", {}).get("outURL"):
            affects_info["addition"]["outURL"] = detail.get("url", "")
            affects_info["affects"] = detail.get("url", "")
        return affects_info

    def parse_black_links(self, target, details):
        """Build one high-severity warning per hidden-link (暗链) detail."""
        parse_category = "black_links"
        warnings = []
        for detail in details:
            affects_info = self._affects_with_fallback(detail, target)
            warnings.append({
                "id": get_warning_id(
                    risk_title=parse_category,
                    task_type=self.task_type,
                    url=detail.get("url"),
                    job_id=self.task.jobId
                ),
                "addition": affects_info["addition"],
                "affects": affects_info["affects"],
                "level": VUL_LEVEL_MAP["高"],
                "title": WARNING_CATEGORY_KEY_LABEL_MAP[self.task_type][parse_category],
                "category": "black_links",
                "detailText": f"{detail.get('url')}发现暗链 {len(detail.get('links', []))}个",
                "detail": detail,
                "isResolved": False
            })
        return warnings

    def parse_broken_links(self, target, details):
        """Group broken links by their referring page, one warning per page."""
        parse_category = "broken_links"
        page_map = defaultdict(list)
        for detail in details:
            page_map[detail.get("referer")].append(detail.get("url"))

        warnings = []
        for referer, links in page_map.items():
            # Only the last broken link of the page is surfaced as the
            # outbound URL, and only when it leaves the monitored target.
            links_end = links[-1]
            warnings.append({
                "id": get_warning_id(
                    risk_title=parse_category,
                    task_type=self.task_type,
                    url=referer,
                    job_id=self.task.jobId
                ),
                "addition": {
                    "outURL": "" if links_end.startswith(target) else links_end
                },
                "affects": referer,
                "level": VUL_LEVEL_MAP["低"],
                "title": WARNING_CATEGORY_KEY_LABEL_MAP[self.task_type][parse_category],
                "category": parse_category,
                "detailText": f"{referer}发现坏链 {len(links)} 个",
                "detail": links,
                "isResolved": False
            })
        return warnings

    def parse_foreign_links(self, target, details):
        """Group risky outbound links per page URL and emit one warning each.

        ``risk_link`` entries are filtered by the foreign-link plugins enabled
        in the task settings and re-keyed via ``RiskTypeMapping``; other
        details are taken as-is. Entries are grouped as
        ``{page_url: {risk_url: {key: [entries]}}}``.
        """
        parse_category = "foreign_links"
        url_obj = defaultdict(dict)
        moment_obj = []
        url_confidence = defaultdict(dict)
        plugins = self.task_settings.get("collect", {}).get("plugins", [])
        foreign_link_types = {plugin for plugin in plugins if "foreign_links" in plugin}
        # The bare "foreign_links" plugin enables every sub-type.
        if "foreign_links" in foreign_link_types:
            foreign_link_types = FOREIGN_LINKS_PLUGIN_TUPLE

        for detail in details:
            if detail.get("key") == "risk_link":
                for result in detail.get("results") or []:
                    risk_type = int(result.get("risk_type"))
                    if f"foreign_links_{risk_type}" not in foreign_link_types:
                        continue
                    result_new = copy.deepcopy(result)
                    result_new.update({
                        "extra_details": result.get("extra_details", {}).get("zh_cn"),
                        "url": detail.get("url"),
                        "found_at": detail.get("found_at"),
                        # BUG FIX: the original indexed RiskTypeMapping with the
                        # raw risk_type value, raising KeyError whenever the
                        # scanner reports it as a string; the filter above
                        # already coerces to int, so reuse that.
                        "key": RiskTypeMapping[risk_type]
                    })
                    moment_obj.append(result_new)

                # Keep the highest confidence / confidence level seen per page.
                confidence = url_confidence.get(detail["url"])
                if not confidence:
                    url_confidence[detail["url"]] = {
                        "confidence": detail.get("confidence", 0),
                        "confidence_level": detail.get("confidence_level", 0)
                    }
                else:
                    confidence["confidence"] = max(
                        confidence["confidence"], detail.get("confidence", 0))
                    confidence["confidence_level"] = max(
                        confidence["confidence_level"], detail.get("confidence_level", 0))
            else:
                moment_obj.append(detail)

        domain = self.task.domain
        for entry in moment_obj:
            url = entry["url"]
            risk_url = entry["risk_url"]
            key = entry["key"]
            # 未备案域名为本站域名则不计入 — an unregistered domain that is the
            # site's own domain is not counted as a risk.
            if key == "icp" and parse.urlparse(risk_url).hostname == domain:
                continue
            if url not in url_obj:
                # Record the discovery time once, on first sight of the page.
                url_obj[url]["found_at"] = entry.get("found_at")
            url_obj[url].setdefault(risk_url, {}).setdefault(key, []).append(entry)

        warnings = []
        for effects, detail in url_obj.items():
            found_at = detail.pop("found_at", "")
            detail_count = len(detail)
            warnings.append({
                "detail": detail,
                "addition": {
                    "outURL": effects
                },
                "affects": effects,
                "id": get_warning_id(
                    risk_title=parse_category,
                    task_type=self.task_type,
                    url=effects,
                    job_id=self.task.jobId
                ),
                "foundAt": found_at,
                # Pages never seen in a risk_link detail default to level 2,
                # confidence 0 (matching the original defaults).
                "level": url_confidence.get(effects, {}).get("confidence_level", 2),
                "confidence": url_confidence.get(effects, {}).get("confidence", 0),
                "title": '风险外链',
                "category": 'foreign_links',
                "detailText": f"{effects}发现 {detail_count}处风险外链",
                "isResolved": False
            })
        return warnings

    def parse_cryjack(self, target, details):
        """Build one high-severity warning per crypto-mining (挖矿) detail."""
        parse_category = "cryjack"
        warnings = []
        for detail in details:
            affects_info = self._affects_with_fallback(detail, target)
            results = detail.get("results") or []
            warnings.append({
                "addition": affects_info["addition"],
                "affects": affects_info["affects"],
                "id": get_warning_id(
                    risk_title=parse_category,
                    task_type=self.task_type,
                    url=detail.get("url"),
                    job_id=self.task.jobId
                ),
                "level": VUL_LEVEL_MAP['高'],
                "title": WARNING_CATEGORY_KEY_LABEL_MAP[self.task_type][parse_category],
                "category": parse_category,
                # BUG FIX: the original f-string carried stray "$" characters
                # (JS-template leftovers) into the user-visible text.
                "detailText": f"{detail.get('url')}发现 {len(results)}处恶意挖矿程序特征",
                "detail": detail.get("results"),
                "isResolved": False
            })
        return warnings

    def parse_malscan(self, target, details):
        """Build one warning per page flagged by the malware (挂马) scanner."""
        parse_category = "malscan"
        warnings = []
        for info in details:
            detail = []
            for description_key, descriptions in DESCRIPTION_MAP.items():
                description_infos = info.get("staticinfo", {}).get(description_key, [])
                for description_info in description_infos:
                    for url in description_info:
                        for vul_key in description_info[url]:
                            # BUG FIX: the original rebound the loop variable
                            # (`description = description.get(...)`), turning
                            # the DESCRIPTION_MAP value into a str so that the
                            # second vulnerability of any page raised
                            # AttributeError; use a distinct name instead.
                            description = descriptions.get(vul_key, {}).get("zh-cn", "")
                            detail.append({
                                "description": description,
                                "url": url,
                                "category": CATEGORY_MAP[description_key]
                            })
            affects_info = self._get_affects_url(refer_path=info.get("url"), target=target)
            warnings.append({
                "detail": detail,
                "addition": affects_info["addition"],
                "affects": affects_info["affects"],
                "id": get_warning_id(
                    risk_title="malscan",
                    task_type=self.task_type,
                    url=info.get("url"),
                    job_id=self.task.jobId
                ),
                "level": VUL_LEVEL_MAP['高'],
                "title": '网页挂马',
                "category": parse_category,
                "detailText": f"{info.get('url')}发现 {len(detail)}处网页挂马特征",
                "isResolved": False
            })
        return warnings

    def _get_affects_url(self, refer_path, target):
        """Derive the affected URL and outbound URL from a refer chain.

        :param refer_path: either a list of URLs (the refer chain) or a
            single URL string; ``None`` yields the empty defaults.
        :param target: monitored site URL prefix.
        :return: ``{"affects": ..., "addition": {"outURL": ...}}``.
        """
        data = {
            "affects": "",
            "addition": {
                "outURL": ""
            }
        }
        if isinstance(refer_path, list):
            # Walk the chain from the end to find the last in-target URL;
            # the hop right after it (if any) is the outbound URL.
            for index in reversed(range(len(refer_path))):
                elem = refer_path[index]
                if elem.startswith(target):
                    data["affects"] = elem
                    data["addition"]["outURL"] = "" if index == len(refer_path) - 1 else refer_path[index + 1]
                    break
        elif refer_path:
            # BUG FIX: guard against refer_path=None, which previously raised
            # AttributeError on .startswith; now returns the empty defaults.
            data["affects"] = refer_path
            data["addition"]["outURL"] = "" if refer_path.startswith(target) else refer_path

        return data
