import datetime
import json

from bson import ObjectId
from flask import request, g

from app.db.models.asset_tasks import AssetTasks
from app.db.models.tasks import Task
from app.db.models.url_blacklist import UrlBlackList
from app.db.models.url_blacklist_settings import UrlBlackListSettings
from app.config.settings import DATETIME_FMT, UTC_PREFIX
from app.libs.oms import Client
from app.libs.utils import get_job, get_url_path_params
from app.celery.tasks import celery_send_to_alert_group


# Reusable $lookup stages keyed by relation name.  UdBBaseHandler picks
# stages from here (via its ``base_relate`` tuple) when composing the
# aggregation pipeline; each stage joins on the local foreign key and
# exposes the joined documents under the relation's own name.
RELATE_MAP = {
    'jobs': {'$lookup': {'from': "jobs", 'localField': "job_id", 'foreignField': "_id", 'as': "jobs"}},
    'users': {'$lookup': {'from': "users", 'localField': "uid", 'foreignField': "_id", 'as': "users"}},
    'url_blacklist': {
        '$lookup': {'from': "url_blacklist", 'localField': "_id", 'foreignField': "setting_id", 'as': "url_blacklist"}}
}


class UdBBaseHandler(object):
    """Base handler for paginated URL-blacklist listings.

    Reads filter/paging parameters from the Flask request and the
    authenticated user/role from ``g``, then runs a Mongo aggregation
    ($lookup joins from RELATE_MAP + $match + skip/limit paging) against
    ``model``.  Subclasses customise behaviour via the class attributes
    below and an optional ``_get_extra_key(data)`` hook.
    """
    model = UrlBlackList  # mongoengine document the aggregation runs against
    # Fields matched by the free-text ``searchKey`` filter (regular users only).
    search_key_limit = ('jobs.0.note', 'jobs.0.targetUrl')
    base_relate = ('jobs',)  # relations ($lookup stages) always joined
    base_query = {}  # extra $match terms contributed by subclasses
    sort_rule = {}  # $sort spec; empty dict means no explicit sort
    allow_key = ()  # whitelist of keys exposed by _get_clean_data
    page_addition = 0  # extra pages over-fetched per query (see UdbGetBlacklist)

    def __init__(self):
        # All filters come straight from the query string; missing ones are None.
        request_args = request.args
        self.username = request_args.get('username')
        self.target_name = request_args.get('targetName')
        self.target_url = request_args.get('targetUrl')
        self.search_key = request_args.get('searchKey')
        self.role = g.role
        self.user = g.user
        # NOTE(review): int() raises ValueError for non-numeric page/limit
        # query params — confirm they are validated upstream.
        self.page = int(request_args.get('page', 1))
        self.limit = int(request_args.get('limit', 10))

        self.data_length = 0  # total matching count, filled in by _get_data
        self.data = self._get_data()

    def _get_data(self):
        """Run the aggregation and return the current page of raw documents.

        Side effects: sets ``self.data_length`` to the unpaginated match
        count and, for privileged roles, extends ``self.base_relate``
        with 'users' (so _get_clean_data can fall back to user fields).
        """
        lookup = [RELATE_MAP.get(_relate, {}) for _relate in self.base_relate]
        query = {**self.base_query}
        # '普通用户' (= regular user): restricted to own documents, with a
        # single free-text search; other roles get per-field filters.
        if self.role.name == '普通用户':
            query.update({'uid': self.user.id})
            if self.search_key is not None:
                # NOTE(review): user input goes into $regex without re.escape —
                # confirm regex injection / pathological patterns are acceptable.
                query.update({
                    '$or': [{f'{_item}': {'$regex': f'{self.search_key}'}} for _item in self.search_key_limit]})
        else:
            if self.username is not None:
                query.update({"users.0.username": {'$regex': f'{self.username}'}})
            if self.target_name is not None:
                query.update({"jobs.0.note": {'$regex': f'{self.target_name}'}})
            if self.target_url is not None:
                query.update({"jobs.0.targetUrl": {'$regex': f'{self.target_url}'}})

            # Privileged roles also join users: needed for the username filter
            # above and for _get_clean_data's relation fallback.
            lookup.append(RELATE_MAP.get('users'))
            self.base_relate = (*self.base_relate, 'users')

        # $limit counts documents *after* $skip, so this fetches up to
        # (page + page_addition) * limit rows past the skip point.
        pagination = [
            {'$skip': (self.page - 1) * self.limit},
            {'$limit': (self.page + self.page_addition) * self.limit}
        ]
        data_pipeline = [*lookup, {'$match': query}]
        if self.sort_rule:
            data_pipeline.append({'$sort': self.sort_rule})
        res = list(self.model.objects.aggregate([*data_pipeline, *pagination]))
        # The unpaginated pipeline with $count yields the total match count.
        total_count = tuple(self.model.objects.aggregate([*data_pipeline, {'$count': 'count'}]))
        if total_count:
            self.data_length = total_count[0].get('count')

        # Resolve each row's asset_tasks _id and triggerType.
        task_ids = [_['task_id'] for _ in res if _.get("task_id")]
        if not task_ids:
            return res
        task_map = {}
        task_types = {}
        for _ in Task.objects.filter(id__in=task_ids).only('taskId', 'taskType'):
            task_map[_.id] = {'task_id': _.taskId, 'taskType': _.taskType}
            task_types.setdefault(_.taskType, []).append(_.taskId)
        scan_task_map = {}
        # asset_tasks and tasks may have been deleted; when they cannot be
        # found the row simply gets empty values (the UI won't link through).
        for task_type, task_ids in task_types.items():  # NOTE: shadows the outer task_ids
            for _ in AssetTasks.objects.find({f'{task_type}Settings.taskId': {'$in': task_ids}}):
                scan_task_map[getattr(_, f'{task_type}Settings').taskId] = {'asset_task_id': _.id, 'triggerType': _.triggerType}
        for k, v in task_map.items():
            v.update({
                'asset_task_id': scan_task_map.get(v.get('task_id', ""), {}).get('asset_task_id', ''),
                'triggerType': scan_task_map.get(v.get('task_id', ""), {}).get('triggerType', '')
            })
        for _ in res:
            _.update({
                'asset_task_id': task_map.get(_.get('task_id', ""), {}).get('asset_task_id', ""),
                'triggerType': task_map.get(_.get('task_id', ""), {}).get('triggerType', ""),
            })

        return res

    def _get_clean_data(self, data):
        """Project each raw document onto ``allow_key``.

        A key missing on the document itself falls back to the first
        truthy value found in the joined relation documents
        (``base_relate``); values are normalised via _clean_value, and a
        subclass ``_get_extra_key`` hook may add/override keys.
        """
        new_data = []
        for _data in data:
            _temp_dict = {}
            for k in self.allow_key:
                if (v := _data.get(k)) is not None:
                    _temp_dict[k] = self._clean_value(v)
                else:
                    # Walk the relations until one yields a truthy value; if
                    # none does, v keeps the last attempted value (maybe None).
                    for _relate in self.base_relate:
                        if (_relate_data := _data.get(_relate, [])) and (v := _relate_data[0].get(k, '')):
                            break
                    _temp_dict[k] = self._clean_value(v)

            if hasattr(self, '_get_extra_key'):
                _temp_dict.update(self._get_extra_key(_data))

            new_data.append(_temp_dict)
        return new_data

    @staticmethod
    def _clean_value(value):
        """Make a value JSON-friendly: datetime -> ISO string with 'Z'
        suffix, ObjectId -> str, everything else passes through."""
        if isinstance(value, datetime.datetime):
            return f"{value.isoformat()}Z"
        elif isinstance(value, ObjectId):
            return str(value)
        else:
            return value


class UdbGetBlacklist(UdBBaseHandler):
    """Paginated listing of blacklist entries, pruned against the DNS service.

    ``get()`` over-fetches (``page_addition``) so that after pruning rows
    whose job or remote DNS record no longer exists, it can still fill a
    full page of ``limit`` rows.
    """
    model = UrlBlackList
    search_key_limit = ('url', 'jobs.0.note', 'jobs.0.targetUrl')
    base_relate = ('jobs',)
    sort_rule = {'create_time': -1}
    allow_key = (
        '_id', 'url', 'create_time', 'add_type', 'note', 'targetUrl', 'asset_task_id', 'triggerType',
        'detail', 'task_id', 'job_id', 'task_type', 'username', 'notification', 'alertGroupIds'
    )
    page_addition = 10  # over-fetch 10 extra pages to survive pruning

    def _refresh_blacklist(self):
        """Sync the current rows with the DNS service and drop stale ones.

        Returns a tuple ``(surviving_rows, number_of_deleted_rows)``.
        """
        blacklist_dict = {}
        job_map = {}   # job_id -> job, memoized across rows
        del_list = []  # stringified _ids of rows scheduled for deletion
        for _data in self.data:
            job_id = _data.get('job_id')
            if not (_job := job_map.get(job_id)):
                _job = get_job(job_id)
                if not _job:
                    # Job is gone: drop the orphaned blacklist row.
                    # Bug fix: store the id as str so the membership test
                    # below matches (the remote service returns string ids;
                    # previously ObjectId entries never matched str(_id)).
                    del_list.append(str(_data.get('_id')))
                    continue
                job_map[job_id] = _job

            # _job is guaranteed truthy here (the old dead else-branch removed).
            blacklist_dict[str(_data.get('_id', ''))] = {
                'dns_id': _job.dns_id,
                'url': _data.get('url', '')
            }

        request_params = json.dumps({
            'urlBlacklist': blacklist_dict
        })
        # The DNS service reports ids it no longer knows about.
        resp = Client().post('/api/inner/dnss/udb_sync_blacklist/', request_params).get('data', {})
        del_list += resp.get('del_list', [])
        if del_list:
            for blacklist_id in del_list:
                _obj = UrlBlackList.objects.get(pk=ObjectId(blacklist_id))
                _obj.delete()
            self.data_length -= len(del_list)

        # Keep only the rows that were not deleted.
        new_data = [_data for _data in self.data if str(_data.get('_id')) not in del_list]

        return new_data, len(del_list)

    def get(self):
        """Refresh/prune repeatedly until a full page survives (or nothing
        more is pruned), then return the cleaned page plus total count."""
        while (_res := self._refresh_blacklist()) and _res[1] > 0 and len(_res[0]) < self.limit:
            self.data = self._get_data()

        data = self._get_clean_data(_res[0][:self.limit])
        return {'data': data, 'count': self.data_length}

    @staticmethod
    def _get_extra_key(data):
        """Build the absolute URL by prefixing the joined job's targetUrl."""
        if job := data.get('jobs'):
            target_url = job[0].get('targetUrl', '')
            return {'url': f"{target_url}{data.get('url')}"}
        return {'url': data.get('url')}


class UdbGetBlackListSettings(UdBBaseHandler):
    """Paginated listing of URL-blacklist *settings*, joined with their
    jobs and with the blacklist entries that reference each setting."""
    model = UrlBlackListSettings
    search_key_limit = ('jobs.0.note', 'jobs.0.targetUrl')
    base_relate = ('jobs', 'url_blacklist')
    sort_rule = {'create_time': -1}
    allow_key = ('_id', 'note', 'targetUrl', 'settings', 'username', 'notification', 'alertGroupIds')

    def get(self):
        cleaned = self._get_clean_data(self.data)
        return {'data': cleaned, 'count': self.data_length}

    @staticmethod
    def _get_extra_key(data):
        # 'used' flags whether any blacklist entry references this setting.
        return {'used': bool(data.get('url_blacklist', []))}


class ScanUrlBlackListSetting(object):
    """Loads per-user URL-blacklist settings from a Mongo collection."""

    def __init__(self, collection):
        self.collection = collection

    def get_uid_setting_map(self, uid_list: list):
        """Return {uid: [setting_dict, ...]} for the given uids, newest first."""
        # TODO: REWRITE THIS FUNCTION WHEN USED PYMONGO.
        result = {}
        cursor = self.collection.find({"uid": {"$in": uid_list}}).order_by('-create_time')
        for record in cursor:
            # Flatten the embedded settings document and attach metadata.
            setting = json.loads(record.settings.to_json())
            setting['setting_id'] = record.id
            setting['job_id'] = record.job_id
            setting['alertGroupIds'] = record.alertGroupIds
            setting['notification'] = record.notification
            result.setdefault(record.uid, []).append(setting)

        return result


class ScanUrlBlackList(object):
    """Turns scan warnings into URL-blacklist entries.

    Workflow: populate state through the ``set_*`` methods, call
    ``travel()`` to match each warning against the owning user's
    settings (building ``update_dict``), then ``save()`` to push the
    resulting blacklist to the DNS service and persist ``UrlBlackList``
    documents.
    """

    # Numeric vulnerability level (1-5) -> severity bucket name.
    VUL_LEVEL_MAP = {
        1: 'low',
        2: 'low',
        3: 'medium',
        4: 'high',
        5: 'critical',
    }

    # Human-readable (Chinese) labels used when composing detail messages.
    NAME_MAP = {
        'vul': {
            'low': '低危漏洞',
            'medium': '中危漏洞',
            'high': '高危漏洞',
            'critical': '严重漏洞',
        },
        'securityEvent': {
            "black_links": '暗链',
            "broken_links": '坏链',
            "cryjack": '挖矿',
            "malscan": '挂马',
            "foreign_links": '风险外链',
        }
    }

    def __init__(self, collection):
        self.collection = collection
        self.warning_list = []      # items described in set_warnings
        self.uid_settings_map = {}  # uid -> list of setting dicts
        self.uid_user_map = {}      # uid -> user object (must expose .username)
        self.aid_asset_map = {}     # job_id -> asset object (.note / .targetUrl)

        # (username, targetUrl, job_id) -> pending update info, built by travel().
        self.update_dict = {}

    def set_warnings(self, warning_lit):
        """Inject the warnings to process.  Each item is a dict::

            {
                "end_time": datetime,   # scan finish time (naive; travel()
                                        # adds UTC_PREFIX before formatting)
                "task_type": "vul" | "securityEvent",
                "task_id": ObjectId,
                "job_id": ObjectId,
                "uid": ObjectId,
                "warnings": list,       # warning documents
            }
        """
        self.warning_list = warning_lit

    def set_uid_settings_map(self, uid_settings_map):
        self.uid_settings_map = uid_settings_map

    def set_uid_user_map(self, uid_user_map):
        self.uid_user_map = uid_user_map

    def set_aid_asset_map(self, aid_asset_map):
        self.aid_asset_map = aid_asset_map

    def travel(self):
        """Match every warning against the owning user's settings.

        For each warning URL that hits a setting, queue a blacklist update
        in ``update_dict`` under the key (username, targetUrl, job_id).
        """
        for _item in self.warning_list:
            _uid = _item.get('uid')
            _job_id = _item.get('job_id')
            _task_type = _item.get('task_type')
            _end_time = _item.get('end_time')
            # NOTE(review): a uid/job_id missing from these maps falls back
            # to {} which has no .username/.note attribute and raises
            # AttributeError — confirm the maps are always pre-populated.
            _username = self.uid_user_map.get(_uid, {}).username
            _asset = self.aid_asset_map.get(_job_id, {})
            _note = _asset.note
            _target_url = _asset.targetUrl
            user_setting_list = self.uid_settings_map.get(_uid, [])
            if not user_setting_list:
                continue

            _task_id = _item.get('task_id')
            warning_dict = self._parse_warning(_item.get('warnings'), _task_type)
            if not warning_dict:
                continue

            for _url, _warning_k_v in warning_dict.items():
                if _hit_res := self.is_hit(user_setting_list, _warning_k_v, _task_type, _job_id):
                    # Normalized path form of the URL (params stripped).
                    _ex_url = get_url_path_params(_url)
                    _setting_id, _notification, _alert_group = _hit_res
                    _detail = self._get_detail(_url, _end_time, _task_type, _warning_k_v)
                    _temp_dict = self.update_dict.setdefault(
                        (_username, _target_url, _job_id),
                        {'update_list': [], 'blacklist': [], 'alert_group': [], "ex_blacklist": []}
                    )
                    _temp_dict['alert_group'] = _alert_group
                    _temp_dict['notification'] = _notification
                    _temp_dict['blacklist'].append(_url)
                    _temp_dict['ex_blacklist'].append(_ex_url)
                    # Shift naive end_time by UTC_PREFIX (presumably a tz
                    # offset — TODO confirm) for display.
                    _temp_dict['end_time'] = (_end_time + UTC_PREFIX).strftime(DATETIME_FMT)
                    _temp_dict['note'] = _note
                    _temp_dict['update_list'].append({
                        "ex_url": _ex_url,
                        "detail": _detail,
                        "url": _url,
                        "add_type": "系统",
                        "uid": _uid,
                        "task_id": _task_id,
                        "job_id": _job_id,
                        "task_type": _task_type,
                        "setting_id": _setting_id
                    })

    def save(self):
        """Push queued blacklists to the DNS service and persist results.

        For each group, URLs accepted by the service are written (or
        refreshed) as ``UrlBlackList`` documents; ``success_list`` /
        ``failed_list`` are written back into ``update_dict`` for the
        notification step.
        """
        for (username, target_url, job_id), update_dict in self.update_dict.items():
            update_dict['success_list'] = []
            _update_list = update_dict.get('update_list', [])
            _ex_blacklist = update_dict.get('ex_blacklist', [])
            job = get_job(job_id)
            if not job:
                # TODO: log
                continue

            params = {
                'username': username,
                'dns_id': job.dns_id,
                'site_id': job.site_id,
                'blacklist': _ex_blacklist,
                'operator': 'scanv'
            }
            try:
                resp_data = Client().post('/api/inner/dnss/udb_add_blacklist/', json.dumps(params)).get('data', {})
                failed_list = resp_data.get('failed', [])
                succeed_list = resp_data.get('succeed', [])
            except Exception:
                # Bug fix: was a bare ``except:`` (also swallowed SystemExit /
                # KeyboardInterrupt).  Best effort: treat the whole batch as
                # failed when the DNS call errors out.  Bug fix: use the
                # ex-url form so the ``_ex_url in failed_list`` checks below
                # (and the report) actually match — the old ``_blacklist``
                # held raw warning URLs that never matched an ex_url.
                failed_list = _ex_blacklist
                succeed_list = []
            for _update_one in _update_list:
                _url = _update_one.get('url')
                _ex_url = _update_one.get('ex_url')
                if _ex_url in failed_list:
                    continue
                # Persist the normalized path as the stored url.
                _update_one['url'] = _update_one.pop('ex_url')
                if not (_exists_blacklist := UrlBlackList.objects.filter(url=_ex_url, job_id=job.id).first()):
                    # New entry.
                    UrlBlackList(**_update_one, create_time=datetime.datetime.utcnow()).save()
                    update_dict['success_list'].append(_url)
                elif _ex_url in succeed_list:
                    # Already present: refresh its timestamp.
                    _exists_blacklist.update(create_time=datetime.datetime.utcnow())
                    update_dict['success_list'].append(_url)

            update_dict['failed_list'] = [f"{target_url}{failed_url}" for failed_url in failed_list]

    def get_update_dict(self):
        """Expose the accumulated update groups (for Send)."""
        return self.update_dict

    def _parse_warning(self, warnings, task_type):
        """Dispatch to the per-task-type parser; unknown types yield {}."""
        _res_dict = {}
        if task_type == 'vul':
            _res_dict = self._parse_vul(warnings)
        elif task_type == 'securityEvent':
            _res_dict = self._parse_security_event(warnings)

        return _res_dict

    def _parse_vul(self, warnings):
        """Aggregate non-ignored vul warnings per URL into severity counts."""
        warning_dict = {}
        for _warning in warnings:
            if _warning.ignoredTime:
                continue
            _url = _warning.affects
            _value_dict = warning_dict.setdefault(_url, {"critical": 0, "high": 0, "medium": 0, "low": 0})

            if _level_name := self.VUL_LEVEL_MAP.get(_warning.level):
                _value_dict[_level_name] += 1

        return warning_dict

    def _parse_security_event(self, warnings):
        """Aggregate non-ignored security events per URL.

        Link categories are counted; cryjack/malscan are boolean flags.
        """
        warning_dict = {}
        for _warning in warnings:
            if _warning.ignoredTime:
                continue
            # Broken links are never blacklisted.
            if _warning.category == "broken_links":
                continue
            _url = _warning.affects
            _value_dict = warning_dict.setdefault(
                _url, {"black_links": 0, "broken_links": 0, "foreign_links": 0, "cryjack": False, "malscan": False})

            if _level_name := _warning.category:
                if _level_name == "black_links":
                    _value_dict[_level_name] += len(_warning.detail.get("links", []))
                elif isinstance(_value_dict[_level_name], bool):
                    _value_dict[_level_name] = True
                else:
                    _value_dict[_level_name] += len(_warning.detail)

        return warning_dict

    def _get_detail(self, url, end_time, task_type, data):
        """Compose the human-readable (Chinese) detail message for one URL."""
        name_map = self.NAME_MAP.get(task_type, {})
        msg = ""
        for k, v in data.items():
            if v:
                s0 = name_map.get(k, '')
                if not isinstance(v, bool):
                    # Counted categories get a "<count>个" prefix.
                    s0 = f"{v}个{s0}"
                msg = f"{msg}, {s0}" if msg else s0

        prefix_end_time = end_time + UTC_PREFIX
        return f"{url}页面在{prefix_end_time.strftime(DATETIME_FMT)}监测发现存在{msg}，系统已自动加黑该风险URL。"

    @staticmethod
    def _is_matched(_setting_dict, warning_data):
        """Compare each setting threshold/flag against the warning values.

        Numeric thresholds match when the warning count reaches them;
        boolean flags match on equality.  Returns {key: bool}.
        """
        matched_map = {}
        for _k, _setting_v in _setting_dict.items():
            _warning_value = warning_data[_k]
            if (not isinstance(_setting_v, bool)) and _warning_value >= _setting_v:
                matched_map[_k] = True
            elif isinstance(_setting_v, bool) and _warning_value == _setting_v:
                matched_map[_k] = True
            else:
                matched_map[_k] = False
        return matched_map

    def is_hit(self, user_setting_list, warning_data, task_type, job_id):
        """Return (setting_id, notification, alertGroupIds) for the first
        matching setting of this job, or False when none matches."""
        for _setting in user_setting_list:
            if _setting.get('job_id') != job_id:
                continue
            # NOTE(review): a setting without a ``task_type`` key yields None
            # here and .values() raises — confirm settings always carry both
            # task-type sub-dicts.
            _setting_dict = _setting.get(task_type)
            # Skip when both the warning and the setting are entirely zero/False.
            if (not any(warning_data.values())) and (not any(_setting_dict.values())):
                continue
            matched_map = self._is_matched(_setting_dict, warning_data)
            # broken_links is exempt from matching (never blacklisted).
            is_matched = all(mv for mk, mv in matched_map.items() if mk != "broken_links")
            if is_matched:
                return _setting.get('setting_id'), _setting.get('notification'), _setting.get('alertGroupIds')
        return False


class Send(object):
    """Dispatches blacklist results to alert groups via a celery task."""

    def __init__(self):
        self.update_dict = {}

    def set_update_dict(self, update_dict):
        self.update_dict = update_dict

    def send_to_alert_group(self):
        """Build one notification payload per update group and hand it to
        celery; groups without alert recipients are skipped."""
        for (username, target_url, _job_id), info in self.update_dict.items():
            failures = info.get('failed_list', [])
            successes = info.get('success_list', [])
            payload = {
                'user': username,
                'name': info.get('note', ''),
                'dz': target_url,
                'time': info.get('end_time'),
                'num': len(successes),
                'domain_list': successes,
                'dz1': failures[0] if failures else '',
                'num1': len(failures)
            }
            if alert_group := info.get('alert_group', []):
                celery_send_to_alert_group.delay(payload, alert_group, info.get('notification').to_dict())
