import datetime
from datetime import timedelta

from bson import ObjectId
from django.utils.dateparse import parse_date
from rest_framework import serializers
from mongolog.models import AppsLog, FileLog, CASLog
from mongolog.serializers import MongoBaseSerializer
from mongolog.utils import date_eq, date_range, res_queryset_id2name, res_queryset_id2name_new
from tools.datetime import range_date, date_filter_new, today_ago_many, as_datetime, aware_localtime
from userdata.serializers import UserSerializer
from zmad.models import ZTPResource, ZTPDevice
from .models import DeviceInfo, Device, ZTPGroup
from .utils import translate_apps_danger_type, translate_apps_sensitive_type


def asset_count_serializer(model, platform=None, filter_resource=False, **filters):
    """Asset-statistics template: build count stats from a Model and filters.

    Args:
        model: Django model whose rows carry a ``log_time`` datetime field.
        platform: optional platform code applied as an extra filter.
        filter_resource: when True, keep only rows whose ``resource`` points
            at an existing ``ZTPResource`` (i.e. valid applications).
        **filters: extra queryset filters. The special key ``auth_code``
            (an iterable) is applied as ``auth_code__in`` instead of the
            remaining filters.

    Returns:
        dict with keys ``total``, ``today``, ``day_on_day``, ``week_on_week``.
        The two rate values are either a percent string such as ``'12.50%'``
        or the int ``0`` when the reference period had no records (the mixed
        type is kept for backward compatibility with existing callers).
    """
    today = datetime.date.today()
    # Use .get() so callers that omit auth_code entirely no longer raise
    # KeyError (the original accessed filters['auth_code'] directly).
    auth_code = filters.get('auth_code')
    if auth_code is not None:
        query = model.objects.filter(auth_code__in=auth_code)
    else:
        query = model.objects.filter(**filters)
    if filter_resource:
        # Keep only logs that belong to still-existing (valid) applications.
        query = query.filter(resource__in=list(ZTPResource.objects.values_list('id', flat=True)))
    if platform:
        query = query.filter(platform=platform)
    total_count = query.count()

    def _count_between(start, end):
        # NOTE(review): bounds are inclusive on both ends (gte/lte), so a
        # record logged exactly at a boundary midnight is counted in two
        # adjacent periods — kept as-is to preserve the original counting.
        return query.filter(
            log_time__gte=as_datetime(start),
            log_time__lte=as_datetime(end),
        ).count()

    today_count = _count_between(today, today + timedelta(days=1))
    yesterday_count = _count_between(today - timedelta(days=1), today)
    this_week_count = _count_between(today - timedelta(weeks=1), today)
    # Bug fix: the original wrote the last-week bounds into this_week_query,
    # leaving last_week_query an empty dict — last_week_count then equalled
    # the unfiltered total and skewed the week-on-week rate.
    last_week_count = _count_between(today - timedelta(weeks=2), today - timedelta(weeks=1))

    if yesterday_count != 0:
        rate = (today_count - yesterday_count) / yesterday_count
        day_on_day = '%.2f%%' % (rate * 100)
    else:
        day_on_day = 0

    if last_week_count != 0:
        rate = (this_week_count - last_week_count) / last_week_count
        week_on_week = '%.2f%%' % (rate * 100)
    else:
        week_on_week = 0

    return {
        'total': total_count,
        'today': today_count,
        'day_on_day': day_on_day,
        'week_on_week': week_on_week,
    }


def most_visits(date_from, date_to):
    """Return the id of the most-visited resource within [date_from, date_to].

    Aggregates CASLog records (grouped by ``resource``) and returns the
    resource value with the highest hit count, or None when there are no
    matching log records.
    """
    aggregation = [
        {
            '$match': {
                'date': {'$exists': True},
                'resource': {'$exists': True},
                'platform': {'$exists': True},
            }
        },
        {
            '$group': {
                '_id': {
                    'resource': '$resource'
                },
                'count': {'$sum': 1}
            }
        }]
    match = date_range('log_time', date_from, date_to)
    if match:
        # Prepend the date-window filter so it narrows the scan first.
        aggregation.insert(0, {'$match': match})
    queryset = list(CASLog.objects.mongo_aggregate(aggregation))
    if not queryset:
        return None
    return max(queryset, key=lambda agg: agg['count'])['_id']['resource']


# Reworked version of trend_graph_serializer (refactor in progress).
def trend_graph_serializer_v2(model, field=None, date_from=None, date_to=None, auth_code=None, platform=None,  **filters):
    """Trend-graph template (UNFINISHED rewrite).

    Builds an aggregation pipeline from the Model, the field to aggregate
    on, and the filter conditions, but never executes it: the function
    currently ends in ``pass`` and always returns None.

    NOTE(review): work-in-progress stub — the pipeline is assembled and
    then discarded. Do not call this expecting trend data.
    """
    resource = None
    # Normalize a scalar auth_code into a one-element list.
    if isinstance(auth_code, int):
        auth_code = [auth_code]
    # Group by date, keeping the top-10 field values by visit count in range.
    aggregation = [
            {
                '$match': {
                    'date': {'$exists': True},
                    'platform': {'$exists': True},
                }
            },
            {
                '$group': {
                    '_id': {
                        'date': '$date',
                        'field': f'${field}'
                    },
                    'count': {'$sum': 1}
                }
            },
            {
                '$group': {
                    '_id': '$_id.field',
                    'date_count': {
                        '$push': {
                            'date': '$_id.date',
                            'count': '$count'
                        }
                    },
                    'total_count': {'$sum': '$count'}
                }
            },
            {'$sort': {'total_count': -1}},
            {'$limit': 10}
    ]
    if field:
        aggregation[0]['$match'][field] = {'$exists': True}
    if platform:
        aggregation[0]['$match']['platform'] = platform
    if auth_code:
        aggregation[0]['$match']['auth_code'] = {'$in': auth_code}
    aggregation[0]['$match'].update(date_range('log_time', date_from, date_to))
    if filters:
        resource = filters.get("resource")
        aggregation[0]['$match'].update(filters)

    # NOTE(review): execution of the pipeline was never implemented.
    pass


def trend_graph_serializer(model, field, date_from=None, date_to=None,
                           auth_code=None, platform=None,  **filters):
    """Trend-graph template: build trend data from a Model, the field to
    aggregate on, and the given filter conditions.

    Returns a dict with 'top'/'fields' (top-10 ``field`` values by total
    count) and 'trend' (per-date series). When ``auth_code`` is provided the
    trend is broken down per platform and a 'resource' key may be included.

    NOTE(review): several fragile spots are flagged inline below; the code
    is kept byte-identical and only annotated.
    """
    # auth_code == 7 is special-cased into a one-element list; any other
    # value is passed through unchanged (callers appear to send lists).
    if auth_code == 7:
        auth_code_equal = [7]
    else:
        auth_code_equal = auth_code
    if auth_code is not None:
        # Top-10 pipeline: count per (date, field) pair, then regroup per
        # field value with its per-date counts and a grand total.
        aggregation_old = [
                {
                    '$match': {
                        'date': {'$exists': True},
                        field: {'$exists': True},
                        'platform': {'$exists': True},
                        "auth_code": {'$in': auth_code_equal},
                    }
                },
                {
                    '$group': {
                        '_id': {
                            'date': '$date',
                            'field': f'${field}'
                        },
                        'count': {'$sum': 1}
                    }
                },
                {
                    '$group': {
                        '_id': '$_id.field',
                        'date_count': {
                            '$push': {
                                'date': '$_id.date',
                                'count': '$count'
                            }
                        },
                        'total_count': {'$sum': '$count'}
                    }
                },
                {'$sort': {'total_count': -1}},
                {'$limit': 10}
        ]
    else:
        # Same top-10 pipeline without the auth_code restriction.
        aggregation_old = [
            {
                '$match': {
                    'date': {'$exists': True},
                    field: {'$exists': True},
                    'platform': {'$exists': True},
                }
            },
            {
                '$group': {
                    '_id': {
                        'date': '$date',
                        'field': f'${field}'
                    },
                    'count': {'$sum': 1}
                }
            },
            {
                '$group': {
                    '_id': '$_id.field',
                    'date_count': {
                        '$push': {
                            'date': '$_id.date',
                            'count': '$count'
                        }
                    },
                    'total_count': {'$sum': '$count'}
                }
            },
            {'$sort': {'total_count': -1}},
            {'$limit': 10}
        ]
    if platform:
        aggregation_old[0]['$match']['platform'] = platform
    if auth_code is not None:
        # Per-platform trend pipeline: count per (date, platform), then
        # regroup per date with the platform breakdown pushed into a list.
        aggregation = [
            {
                '$match': {
                    'date': {'$exists': True},
                    # field: {'$exists': True},
                    'platform': {'$exists': True},
                    "auth_code": {'$in': auth_code_equal},
                }
            },
            {
                '$group': {
                    '_id': {
                        'date': '$date',
                        'platform': '$platform'  # include the platform field in the group key
                    },
                    'count': {'$sum': 1}
                }
            },
            {
                '$group': {
                    '_id': '$_id.date',
                    'date_count': {
                        '$push': {
                            'platform': '$_id.platform',
                            'count': '$count'
                        }
                    },
                    # 'total_count': {'$sum': '$count'}
                }
            },
            # {'$sort': {'total_count': -1}},
            # {'$limit': 10}
        ]
        if platform:
            aggregation[0]['$match']['platform'] = platform
        if auth_code is not None:   # always true inside this branch (outer check)
            # Filter out dirty data: drop records that lack a resource field.
            aggregation[0]['$match']['resource'] = {'$exists': True}
        match = date_range('log_time', date_from, date_to)
        if filters:
            resource = filters.get("resource")
            match.update(filters)
        if match:
            aggregation.insert(0, {'$match': match})
        # Platform codes: 1 mobile, 2 PC client, 3 web (4 Linux).
        device_dict = {1: "移动端", 2: "PC端", 3: "web端", 4: "Linux端"}
        if platform:
            device_dict = {platform: device_dict[platform]}
        queryset = list(model.objects.mongo_aggregate(aggregation))
        # res_mapping = dict(ZTPResource.objects.values_list('pk', 'name'))
        # print(166, date_from, date_to)
        if auth_code is not None:
            field2dc_list = []
            platform_list = []
            for agg in queryset:
                # Start every date with a zero count per platform label.
                date_counts = {i: 0 for i in device_dict.values()}
                # date_counts = {"移动端": 0, "PC端": 0, "web端": 0, "Linux端": 0}
                for dc in agg['date_count']:
                    if dc.get('platform'):
                        platform_str = device_dict[dc['platform']]
                        platform_list.append(platform_str)
                        # date_counts[platform] = dc['count']
                        field2dc = {'date': agg["_id"], 'type': platform_str, 'count': dc['count']}
                    # NOTE(review): this append sits OUTSIDE the `if` above, so
                    # when dc has no 'platform' the previous (stale) field2dc is
                    # appended again — and it is unbound (NameError) on the very
                    # first iteration. It looks like it should be indented one
                    # level in; confirm before changing.
                    field2dc_list.append(field2dc)
                # Backfill zero entries for platforms with no data on this date.
                for data_platform in date_counts.keys():
                    if data_platform not in platform_list:
                        field2dc_list.append({'date': agg["_id"], 'type': data_platform, 'count': 0})
            field2dc_list = sorted(field2dc_list, key=lambda item: item['date'])
            # field2dc_old = {
            #     agg['_id']: {parse_date(dc['date']): dc['count'] for dc in agg['date_count'] if dc.get('date')}
            #     for agg in queryset}
            # Work out the display window:
            # if date_from/date_to were provided, the window is exactly those;
            # otherwise it runs from the earliest recorded date up to today.
            if not date_from:
                if len(field2dc_list) > 0:
                    date_set = {dc.get("date") for dc in field2dc_list}
                    date_from = min(date_set)
                    date_from = datetime.datetime.strptime(date_from, "%Y-%m-%d").date()
                else:
                    date_from = today_ago_many(days=7)
            if not date_to:
                date_to = datetime.date.today()

            # Assemble the data.
            field2date_list = [field2date["date"] for field2date in field2dc_list]
            field2dc_list_new = []
            # print(205, field2dc_list)
            # Backfill dates with no records: default every platform to 0.
            if not date_from and not date_to:
                date_from, date_to = date_filter_new({})
            for date in range_date(date_from, date_to):
                if date.strftime('%Y-%m-%d') in field2date_list:
                    for field2dc_data in field2dc_list:
                        if date.strftime('%Y-%m-%d') == field2dc_data['date']:
                            field2dc_list_new.append(field2dc_data)
                else:
                    field2dc_list_new.extend(
                        {'date': date.strftime('%Y-%m-%d'), "type": _type, "count": 0}
                        for _type in device_dict.values())
            # print(213, field2dc_list_new)
    else:
        # TODO: this branch is known to be problematic and needs fixing.
        aggregation = [
            {
                '$group': {
                    '_id': {
                        'date': '$date',
                        'field': f'${field}'
                    },
                    'count': {'$sum': 1}
                }
            },
            {
                '$match': {
                    '_id.date': {'$exists': True},
                    '_id.field': {'$exists': True},
                }
            },
            {
                '$group': {
                    '_id': '$_id.field',
                    'date_count': {
                        '$push': {
                            'date': '$_id.date',
                            'count': '$count'
                        }
                    },
                    'total_count': {'$sum': '$count'}
                }
            },
            {'$sort': {'total_count': -1}},
            {'$limit': 10}
        ]
        if platform:
            # NOTE(review): this mutates aggregation_old (already filtered on
            # platform above), NOT the local `aggregation` pipeline — so
            # `aggregation` never receives a platform filter in this branch.
            # Confirm whether that is intentional.
            aggregation_old[0]['$match']['platform'] = platform
        match = date_range('log_time', date_from, date_to)
        if filters:
            match.update(filters)
        if match:
            aggregation.insert(0, {'$match': match})

        queryset = list(model.objects.mongo_aggregate(aggregation))

        # Map each field value to {date: count}, skipping entries without a date.
        field2dc = {agg['_id']: {parse_date(dc['date']): dc['count'] for dc in agg['date_count'] if dc.get('date')}
                    for agg in queryset}
        date_set = {d for dc in field2dc.values() for d in dc.keys()}

        if date_set:
            date_from = date_from or min(date_set)
            date_to = date_to or datetime.date.today()
        else:
            date_from = date_to = datetime.date.today()

        # Pre-fill the whole window with zeros, then overlay actual counts.
        trend = {date: {'date': date.strftime('%Y-%m-%d'), **{f: 0 for f in field2dc.keys()}}
                 for date in range_date(date_from, date_to)}
        try:
            for f, dc in field2dc.items():
                for date, count in dc.items():
                    trend[date][f] = count
        except KeyError:
            # NOTE(review): silently drops counts whose date falls outside the
            # pre-filled window (e.g. when date_from/date_to narrow the range).
            pass

    if auth_code is not None:
        if filters.get("username"):
            match = date_range('log_time', date_from, date_to)
            match.update(filters)
            aggregation_old.insert(0, {'$match': match})
            queryset_old = list(model.objects.mongo_aggregate(aggregation_old))
            queryset_old = res_queryset_id2name(queryset_old)
            top10 = [{'field': agg['_id'], 'count': agg['total_count']} for agg in queryset_old]
            return {'top': top10[:10], 'fields': top10[:10], 'trend': field2dc_list_new}
        else:
            queryset_old = list(model.objects.mongo_aggregate(aggregation_old))
            if model == CASLog and field == 'resource':
                queryset_old = res_queryset_id2name(queryset_old)
            top10 = [{'field': agg['_id'], 'count': agg['total_count']} for agg in queryset_old]
            # NOTE(review): `resource` is only bound when `filters` is truthy
            # (see the auth_code branch above); with auth_code set and empty
            # filters this raises NameError — confirm callers always pass
            # filters here.
            return {'top': top10[:10], 'fields': top10[:10], 'resource': resource, 'trend': field2dc_list_new}
    else:
        queryset_old = list(model.objects.mongo_aggregate(aggregation_old))
        if model == CASLog and field == 'resource':
            queryset_old = res_queryset_id2name(queryset_old)
        top10 = [{'field': agg['_id'], 'count': agg['total_count']} for agg in queryset_old]
        return {'top': top10[:10], 'fields': top10[:10], 'trend': list(trend.values())}


def malware_dist_serializer(maltype):
    """Terminal threat-app distribution, grouped by threat category.

    Counts distinct device UUIDs per threat category for the given
    ``maltype`` ('danger' or 'sensitive'); raises ValidationError for any
    other value.
    """
    # maltype -> (apps array field, category sub-field, label translator)
    config = {
        'danger': ('apps_danger', 'vir_type', translate_apps_danger_type),
        'sensitive': ('apps_sensitive', 'sensitive_description', translate_apps_sensitive_type),
    }
    if maltype not in config:
        raise serializers.ValidationError('威胁类型无效。')
    apps, field, translator = config[maltype]

    pipeline = [
        {
            '$match': {
                # Only logs with a usable, non-empty device UUID and at
                # least one entry in the relevant apps array.
                'device_info.device_uuid': {
                    '$exists': True,
                    '$ne': ''
                },
                f'{apps}.0': {'$exists': True}
            }
        },
        {'$unwind': f'${apps}'},
        {
            '$group': {
                '_id': f'${apps}.{field}',
                # Distinct devices per category (addToSet dedups UUIDs).
                'uuid_list': {'$addToSet': '$device_info.device_uuid'}
            }
        },
        {
            '$project': {
                'count': {'$size': '$uuid_list'}
            }
        }
    ]

    results = []
    for agg in AppsLog.objects.mongo_aggregate(pipeline):
        results.append({'name': translator(agg['_id']), 'count': agg['count']})
    return results


class TerminalSerializer(serializers.ModelSerializer):
    """DeviceInfo serializer exposing the owner and a derived risk list."""
    user = UserSerializer(read_only=True)
    risk_list = serializers.SerializerMethodField()

    def get_risk_list(self, instance):
        """Collect human-readable risk labels for every flagged apk type."""
        checks = (
            (instance.danger_app, "终端检出恶意apk"),
            (instance.risk_app, "终端检出风险apk"),
            (instance.sensitive_app, "终端检出敏感apk"),
        )
        return [label for flagged, label in checks if flagged]

    class Meta:
        model = DeviceInfo
        exclude = ('security_description', 'object_id')


class TerminalRetrieveSerializer(TerminalSerializer):
    """Detail variant of TerminalSerializer that inlines the raw apps lists."""

    class Meta:
        model = DeviceInfo
        exclude = ('security_description', 'object_id')

    def to_representation(self, instance):
        data = super().to_representation(instance)
        # The log documents grow ad-hoc fields over time, so instead of the
        # ORM we fetch the raw document and project just the apps arrays.
        apps_fields = ('apps', 'apps_danger', 'apps_risk', 'apps_sensitive', 'apps_unknow', 'apps_harmless')
        projection = {'_id': 0}
        for name in apps_fields:
            projection[name] = 1
        raw = AppsLog.objects.mongo_find_one({'_id': ObjectId(instance.object_id)}, projection)
        if raw is None:
            # Should not happen unless the backing log document was deleted;
            # fall back to empty lists so the payload shape stays stable.
            raw = {name: [] for name in apps_fields}
        data.update(raw)
        return data


class FileSerializer(serializers.ModelSerializer, MongoBaseSerializer):
    """FileLog serializer with a human-readable `source` label."""
    source = serializers.SerializerMethodField()

    class Meta:
        model = FileLog
        fields = ('id', 'log_time', 'filename', 'source', 'username', 'sen_level', "sen_type", "sen_words")

    def get_source(self, instance):
        """Map the source code to its display label; None for unknown codes."""
        labels = {
            FileLog.SOURCE_DOWNLOAD: '文件下载',
            FileLog.SOURCE_SHARE: '文件共享',
            FileLog.SOURCE_EMAIL: '邮件附件',
        }
        return labels.get(instance.source)


class PCTerminalSerializer(serializers.ModelSerializer):
    """Plain pass-through serializer exposing every ZTPDevice field."""

    class Meta:
        model = ZTPDevice
        fields = '__all__'
        # read_only_fields = ('is_secure', 'device_type')


class TerminalRiskSerializer(serializers.ModelSerializer):
    """Device serializer; also repairs is_secure flags as a side effect."""
    user = UserSerializer(read_only=True)
    # Reconcile mismatched risk_list / is_secure fields.
    # NOTE(review): this loop runs in the CLASS BODY, i.e. once at import
    # time, and performs database writes (.save()) — not on each request.
    # It will also run during migrations/management commands that import
    # this module. Strongly consider moving it into a management command
    # or a data migration; kept byte-identical here.
    device_info = Device.objects.filter(risk_list=list()).all()
    for device_ in device_info:
        device_.is_secure = True
        device_.save()
    # NOTE(review): class-level `queryset` is unused by DRF serializers —
    # presumably a leftover from a ViewSet; verify before removing.
    queryset = Device.objects.all()
    class Meta:
        model = Device
        fields = '__all__'


class TerminalDeviceSerializer(serializers.ModelSerializer):
    """Device serializer that also exposes the owner's group name."""
    user = UserSerializer(read_only=True)
    group_name = serializers.SerializerMethodField()

    def get_group_name(self, instance):
        """Resolve the group name via the owning user's `belong_to` relation."""
        owner = instance.user
        return owner.belong_to.name

    class Meta:
        model = Device
        fields = '__all__'
