import arrow
from mongoengine import Q

from utilities.meta_view import (
    APIView,
    APIViewList,
    APIViewUpdate,
    APIViewCreate,
    APIViewDestroy,
    APIViewRetrieve,
    Success,
    Response,
)

from utilities.enums import LogModuleName, LogTypeName
from apps.strategy.serializer import (
    CustomProfileTemplateSerializer,
    SystemProfileTemplateSerializer,
    SystemProfileTemplateSearchSerializer,
    CustomProfileTemplateSearchSerializer,
)
from apps.vul.serializer import get_plugin_dict
from utilities.enums import AuditLogAction
from utilities.log import audit_log
from utilities.utils import get_paginate
from utilities.scanner import ChangeCheckConfigScheduler
from models.strategy import ScanPageBaseline
from models.target import Targets, TargetGroups
from models.task import ScanTasks


class ListCustomProfileTemplate(APIViewList):
    """Paginated listing of profile templates.

    Enabled system templates are listed first, followed by the current
    user's custom templates; the merged sequence is paginated in memory.
    """

    serializer_class = CustomProfileTemplateSerializer
    queryset = serializer_class.Meta.model.objects

    def list(self):
        page, limit = self.get_paginate()

        # The framework supplies the scope filter keyed on ``user_id``;
        # this model stores the owner under ``custom_id`` instead.
        if self.user_query:
            self.user_query['custom_id'] = self.user_query.pop('user_id', None)

        filters = dict(self.user_query)

        # Start the merged list with the enabled system templates.
        merged = list(
            SystemProfileTemplateSerializer.Meta.model.objects.filter(
                is_disabled__ne=True
            )
        )
        sys_template_count = len(merged)
        merged += list(self.queryset.filter(**filters))
        total = len(merged)

        start = (page - 1) * limit
        page_items = merged[start : start + limit]

        plugin_dict = get_plugin_dict()
        serialized = self.get_serializer(page_items, many=True).data

        templates = []
        for entry in serialized:
            # Replace the raw plugin list with aggregated statistics.
            plugins = entry.pop('plugin_list', [])
            entry.update(
                self.serializer_class.get_plugin_list_statistics(
                    plugins, plugin_dict
                )
            )
            templates.append(entry)

        return Response(
            {
                'templates': templates,
                'page': page,
                'total': total,
                'sys_template_count': sys_template_count,
                'max_count': self.serializer_class.max_count,
            }
        )


class CreateProfileTemplateHandler(APIViewCreate):
    """Create a custom vulnerability profile template and audit the action."""

    serializer_class = CustomProfileTemplateSerializer
    queryset = CustomProfileTemplateSerializer.Meta.model.objects
    lookup_field = 'custom_profile_template_id'

    def create(self):
        # Stamp the new template with the requesting user's id before
        # validation so ownership is enforced server-side.
        payload = {**self.request.data, 'custom_id': self.request.user.user_id}
        serializer = self.get_serializer(data=payload)
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)

        audit_log(
            request=self.request,
            module=LogModuleName.policy_management.value,
            action=AuditLogAction.create.value,
            type=LogTypeName.profile_template.value,
            desc={"detail": f"创建自定义漏洞模板, 模版名称: {serializer.data.get('name', '')}"},
        )

        return Response(serializer.data)


class CustomProfileTemplateHandler(APIViewRetrieve, APIViewUpdate):
    """Retrieve or update a single custom profile template.

    Retrieval is inherited unchanged from ``APIViewRetrieve``; updates
    delegate to ``APIViewUpdate`` and then write an audit-log entry.
    """

    serializer_class = CustomProfileTemplateSerializer
    queryset = CustomProfileTemplateSerializer.Meta.model.objects
    lookup_field = 'custom_profile_template_id'

    def update(self):
        """Apply the base-class update, then record the edit in the audit log.

        Returns the response produced by ``APIViewUpdate.update``.
        """
        # Zero-argument super() is the Python 3 idiom; the explicit
        # super(CustomProfileTemplateHandler, self) form was redundant.
        resp = super().update()

        data = {
            "module": LogModuleName.policy_management.value,
            "action": AuditLogAction.up.value,
            "type": LogTypeName.profile_template.value,
            "desc": {"detail": f"编辑漏洞模板 '{self.request.data.get('name', 'unknown')}'"},
        }
        audit_log(request=self.request, **data)

        return resp


class BatchDeleteProfileTemplate(APIViewDestroy):
    """Delete a batch of the caller's custom profile templates."""

    serializer_class = CustomProfileTemplateSerializer
    queryset = CustomProfileTemplateSerializer.Meta.model.objects
    lookup_field = 'custom_profile_template_id'

    def destroy(self):
        ids = self.request.data.get('template_ids', [])

        # Scope the delete to the requesting user; this model stores the
        # owner under ``custom_id`` rather than ``user_id``.
        if self.user_query:
            self.user_query['custom_id'] = self.user_query.pop('user_id', None)

        pk_field = getattr(self.serializer_class.Meta.model, '_meta').get('primary_key')
        query = {pk_field: {'$in': ids}, **self.user_query}

        # Collect names before deleting so the audit entry can list them.
        names = [tpl.name for tpl in self.queryset.find(query)]
        self.queryset.find(query).delete()

        audit_log(
            request=self.request,
            module=LogModuleName.policy_management.value,
            action=AuditLogAction.del_.value,
            type=LogTypeName.profile_template.value,
            desc={"detail": f"删除漏洞模板 '{', '.join(names)}'"},
        )

        return Success()


class SearchProfileTemplate(APIViewList):
    """Keyword search across system and custom profile templates."""

    serializer_class = CustomProfileTemplateSearchSerializer
    queryset = CustomProfileTemplateSearchSerializer.Meta.model.objects

    def list(self):
        keyword = self.request.GET.get('search_key')

        sys_serializer_class = SystemProfileTemplateSearchSerializer
        sys_queryset = sys_serializer_class.Meta.model.objects.filter(
            is_disabled__ne=True
        )

        # Match the keyword against either the name or the description;
        # with no keyword the empty Q matches everything.
        criteria = Q()
        if keyword:
            criteria = criteria & (
                Q(name__contains=keyword) | Q(description__contains=keyword)
            )

        custom_data = self.get_serializer(
            self.queryset.filter(criteria), many=True
        ).data
        system_data = sys_serializer_class(
            sys_queryset.filter(criteria), many=True
        ).data

        # System templates are listed ahead of custom ones.
        return Response({'templates': [*system_data, *custom_data]})


class ChangeCheckView(APIView):
    """Change-detection (tamper-monitoring) baseline management.

    GET lists targets that have scan-page baselines, with filtering and
    in-memory pagination; PUT rebuilds baselines for the given base task
    ids; DELETE removes them.  Mutations are written to the audit log.
    """

    # Enables per-user scoping (self.user_query) in _initialize.
    check_user = True

    def get(self, request, *args, **kwargs):
        """Return a paginated, filterable list of baseline-backed targets."""
        self._initialize(request, *args, **kwargs)
        page, limit = get_paginate(request)
        target_url = self.request.GET.get('target_url')
        target_name = self.request.GET.get('target_name')
        target_group_name = self.request.GET.get('target_group_name')

        # Both the target query and the group query start user-scoped.
        query = Q(**self.user_query)
        tg_query = Q(**self.user_query)
        if target_name:
            # Targets with an empty name are matched on their title instead.
            query = query & (

                Q(target_name__contains=target_name)
                | (Q(target_name='') & Q(target_title__contains=target_name))
            )
        if target_url:
            query = query & Q(target_url__contains=target_url)
        if target_group_name:
            tg_query = tg_query & Q(target_group_name__contains=target_group_name)

        # Map group id -> {name, id} for the groups visible to this user.
        allow_target_group_keys = ('target_group_name', 'target_group_id')
        target_group_name_map = {}
        target_group_id_list = []
        for item in TargetGroups.objects.only(*allow_target_group_keys).filter(tg_query):
            target_group_id_list.append(item.target_group_id)
            target_group_name_map[item.target_group_id] = {
                'target_group_name': item.target_group_name,
                'target_group_id': item.target_group_id,
            }

        # Aggregate baselines per task: page count plus the first-seen
        # updated_at timestamp (ISO formatted via arrow).
        change_base = ScanPageBaseline.objects.all()
        task_id_dict = {}
        for base in change_base:
            if not task_id_dict.get(base.task_id):
                task_id_dict.update(
                    {
                        base.task_id: {
                            "base_page_count": 1,
                            "updated_at": arrow.get(base.updated_at).isoformat(),
                        }
                    }
                )
            else:
                task_id_dict[base.task_id]["base_page_count"] += 1
        task_ids = list(set(task_id_dict.keys()))
        # Resolve which target each baseline task belongs to (user-scoped).
        task_query = {**self.user_query, 'task_id__in': task_ids}
        target_task_dict = {
            t.target_id: t.task_id
            for t in ScanTasks.objects.filter(**task_query)
        }
        # Restrict to targets that have a baseline task and a visible group.
        query = query & Q(target_id__in=list(target_task_dict.keys()))
        query = query & Q(target_group_id__in=target_group_id_list)
        all_data = []
        for target in Targets.objects.filter(query):
            if target_task_dict.get(target.target_id):
                all_data.append(
                    {
                        "target_id": target.target_id,
                        "target_name": target.target_name or target.target_title,
                        "target_url": target.target_url,
                        "target_group_id": target.target_group_id,
                        "target_group_name": target_group_name_map.get(target.target_group_id, {}).get('target_group_name'),
                        "base_task_id": target_task_dict.get(target.target_id),
                        # Merge in base_page_count / updated_at for the task.
                        **task_id_dict.get(target_task_dict.get(target.target_id)),
                    }
                )
        # Newest baselines first; paginate the merged list in memory.
        all_data = list(
            sorted(all_data, key=lambda item: item["updated_at"], reverse=True)
        )
        count = len(all_data)
        # Ceiling division spelled out with int()/modulo.
        page_count = (
            int(count / limit) if count % limit == 0 else int(count / limit) + 1
        )
        res_data = {
            "base_page": all_data[limit * (page - 1) : limit * page],
            "count": count,
            "page_count": page_count,
        }
        return Response(
            data={"status": "success", "errcode": 0, "data": res_data}, status=200
        )

    def put(self, request, *args, **kwargs):
        """Rebuild change-detection baselines for the given base task ids."""
        self._initialize(request, *args, **kwargs)
        task_ids = self.request.data.get("base_task_id", [])
        targets, target_task_dict = self.get_target(task_ids)
        if not targets:
            return Success()
        # The per-task "nscan" job configuration drives crawler settings.
        task_data_map = {
            t.task_id: t.job_config_map.get("nscan", {})
            for t in ScanTasks.objects.filter(task_id__in=task_ids)
        }
        for t in targets:
            include_urls = []
            exclude_urls = []
            task_id = target_task_dict.get(t['target_id'])
            # Defaults when no task config exists: no crawl limits, scope 3.
            crawler_depth = 0
            crawler_max_page = 0
            crawler_scope = 3
            if task_id:
                task_data = task_data_map.get(task_id, {})
                include_urls = task_data.get('include_url', [])
                exclude_urls = task_data.get('crawler_excluded_urls', [])
                if not task_data.get('is_without_crawl', False):
                    # Crawling enabled: switch scope and copy crawl limits.
                    crawler_scope = 2
                    crawler_depth = task_data.get('crawler_depth', 0)
                    crawler_max_page = task_data.get('crawler_max_page', 0)
            ChangeCheckConfigScheduler(
                user_id=t['user_id'],
                target_id=t['target_id'],
                target_url=t['target_url'],
                base_task_id=task_id,
                is_update_baseline=True,
                include_urls=include_urls,
                exclude_urls=exclude_urls,
                crawler_depth=crawler_depth,
                crawler_max_page=crawler_max_page,
                crawler_scope=crawler_scope,
            ).create()
            data = {
                "module": LogModuleName.policy_management.value,
                "action": AuditLogAction.up.value,
                "type": LogTypeName.change_check_config.value,
                "desc": {
                    "detail": f"更新篡改监测基准，任务组名称： {t['target_group']['target_group_name']}，资产地址：{t['target_url']}"
                },
            }
            audit_log(request=self.request, **data)
        return Success()

    def delete(self, request, *args, **kwargs):
        """Delete change-detection baselines for the given base task ids."""
        self._initialize(request, *args, **kwargs)
        task_ids = self.request.data.get("base_task_id", [])
        targets, target_task_dict = self.get_target(task_ids)
        if not targets:
            return Success()
        for t in targets:
            task_id = target_task_dict.get(t['target_id'])
            ChangeCheckConfigScheduler(
                user_id=t['user_id'],
                target_id=t['target_id'],
                target_url=t['target_url'],
                base_task_id=task_id,
                is_delete_baseline=True,
            ).create()
            data = {
                "module": LogModuleName.policy_management.value,
                "action": AuditLogAction.del_.value,
                "type": LogTypeName.change_check_config.value,
                "desc": {
                    "detail": f"删除篡改监测基准，任务组名称： {t['target_group']['target_group_name']}，资产地址：{t['target_url']}"
                },
            }
            audit_log(request=self.request, **data)
        return Success()

    def get_target(self, task_ids):
        """Resolve targets (joined with their group) for the given task ids.

        Returns ``(targets, target_task_map)`` where ``targets`` is a list
        of raw aggregation documents, each carrying an embedded
        ``target_group`` sub-document, and ``target_task_map`` maps
        target_id -> task_id.
        """
        target_query = {**self.user_query}
        target_task_map = {}
        for task in ScanTasks.objects.filter(task_id__in=task_ids):
            target_task_map[task.target_id] = task.task_id

        target_query.update({"target_id": {"$in": list(target_task_map.keys())}})
        # $lookup joins each target to its group by target_group_id;
        # $unwind with preserveNullAndEmptyArrays=False drops targets
        # that have no matching group.
        pipeline = [
            {'$match': target_query},
            {
                '$lookup': {
                    'from': 'scan_target_groups',
                    'let': {'group_id': "$target_group_id"},
                    'pipeline': [
                        {
                            "$match": {
                                "$expr": {
                                    "$and": [
                                        {"$eq": ["$$group_id", "$target_group_id"]}
                                    ]
                                }
                            }
                        },
                        {"$project": {"_id": 0, "target_group_name": 1}},
                    ],
                    'as': 'target_group',
                }
            },
            {
                "$unwind": {
                    "path": "$target_group",
                    "preserveNullAndEmptyArrays": False,
                }
            },
        ]
        data = []
        for item in Targets.objects.aggregate(pipeline):
            data.append(item)

        return data, target_task_map
