import arrow
import re
import json

from flask import request, jsonify, g

from app.libs.ip_local_info import IP_ADDRESS_DB
from app.libs.redprint import RedPrint
from app.libs.regexp import IP_RE
from app.libs.score import Score
from app.libs.seebug import SeeBug
from app.db import redis
from app.db.models.jobs import Job
from app.db.models.tasks import Task
from app.db.models.balances import Balances
from app.db.models.packages import Packages
from app.db.models.vulndb import KbVuln
from app.db.models.asset_tasks import AssetTasks
from app.cache import cache
from app.libs.utility import make_cache_key_global, make_cache_key_uid
from app.libs.public_funcs import get_ava_results
from app.libs.enums import (
    AVAILABLE_TASK_GROUP_TUPLE,
    WS_TASK_GROUP_TUPLE, TaskType,
    TASK_TYPE_CONFIG
)
from app.handler.v2_index import LastTask
from app.handler.v2_index import get_host_vul_warnings

api = RedPrint('index')


@api.route('/overview', methods=['GET'])
@cache.cached(make_cache_key=make_cache_key_uid, timeout=5)
def overview():
    """Dashboard overview for the current user.

    Returns per-task-type warning counts (``task_info``), job totals with a
    web/host risk split (``job_info``), and zeroed pending-task placeholders
    (``pending_task``). Cached per-uid for 5 seconds.
    """
    uid = g.user.id
    results = {
        'task_info': {
            **{t: 0 for t in WS_TASK_GROUP_TUPLE + AVAILABLE_TASK_GROUP_TUPLE},
            TaskType.hostVul.value: 0,
        },
        'job_info': {
            'total_jobs': 0,
            'risk_jobs': 0,
            'web': 0,
            'risk_web_jobs': 0,
            'host': 0,
            'risk_host_jobs': 0,
        },
        'pending_task': {
            **{t: 0 for t in WS_TASK_GROUP_TUPLE},
            TaskType.hostVul.value: 0,
            TaskType.asset.value: 0
        }
    }

    web_job_count = 0
    host_job_count = 0
    risk_web_jobs = 0
    risk_host_jobs = 0
    jobs = Job.objects.filter(uid=uid)
    total_job_count = jobs.count()
    if total_job_count > 100:
        # Large accounts: use aggregation/count queries instead of iterating
        # every job document in Python.
        update_huge_results(results, uid)
        host_job_count = Job.objects.filter(uid=uid, assetType='host').count()
        web_job_count = total_job_count - host_job_count
        risk_jobs = Job.objects.filter(uid=uid, securityStatus__score__gt=0).count()
        risk_host_jobs = Job.objects.filter(uid=uid, assetType='host', securityStatus__score__gt=0).count()
        risk_web_jobs = risk_jobs - risk_host_jobs

    else:
        for job in jobs:
            score = job.securityStatus.get("score", 0)
            if job.assetType == 'host':
                host_job_count += 1
                if score > 0:
                    risk_host_jobs += 1
            else:
                web_job_count += 1
                if score > 0:
                    risk_web_jobs += 1
        task_type_tuple = WS_TASK_GROUP_TUPLE + AVAILABLE_TASK_GROUP_TUPLE + (TaskType.asset.value, )
        last_task = LastTask(uid, task_type_tuple)
        data = last_task.get()

        task_info = []
        for task_type in task_type_tuple:
            task_info.extend(data.get(task_type, []))

        # Accumulate warning counts per task type. Availability-type tasks
        # count at most 1 per task; other types count every warning.
        for task in task_info:
            _task_type = task.get('taskType')
            if results['task_info'].get(_task_type) is not None:
                # Bug fix: task.get('warnings') has no default, so a task
                # without the key returned None and len(None) raised
                # TypeError. Normalize to a list first.
                warnings = task.get('warnings') or []
                if _task_type in AVAILABLE_TASK_GROUP_TUPLE and len(warnings):
                    results['task_info'][_task_type] += 1
                else:
                    results['task_info'][_task_type] += len(warnings)

        # Only the total is needed here; the per-job warning map is unused.
        host_vul_count, _job_warning_maps = last_task.get_host_vul_warnings()
        results['task_info']['hostVul'] = host_vul_count

    # job info
    results['job_info']['total_jobs'] = total_job_count
    results['job_info']['web'] = web_job_count
    results['job_info']['host'] = host_job_count
    results['job_info']['risk_jobs'] = risk_web_jobs + risk_host_jobs
    results['job_info']['risk_web_jobs'] = risk_web_jobs
    results['job_info']['risk_host_jobs'] = risk_host_jobs

    # last task
    return jsonify({'code': 200, 'msg': 'ok', 'results': results})


def update_huge_results(results, uid):
    """Populate ``results['task_info']`` for accounts with many jobs.

    One aggregation over AssetTasks sums the last-result warning counts for
    hostVul plus every web-security task type; the http/ping entries come
    from two cheap count queries on the availability-status flags.
    """
    group_stage = {'_id': ''}
    for task_type in (TaskType.hostVul.value,) + WS_TASK_GROUP_TUPLE:
        line = TASK_TYPE_CONFIG[task_type]["line"]
        group_stage[task_type] = {'$sum': f'$lastResult.{line}.count'}

    pipeline = [
        {'$match': {'uid': uid, 'taskGroupType': {'$in': ['web_security', 'host']}}},
        {'$group': group_stage},
    ]
    task_info = results['task_info']
    for row in AssetTasks.objects.aggregate(pipeline):
        for task_type, count in row.items():
            # Skip '_id' and anything the caller's dict doesn't track.
            if task_type in task_info:
                task_info[task_type] = count
    task_info['http'] = AssetTasks.objects.filter(httpSecurityStatus='warning', uid=uid).count()
    task_info['ping'] = AssetTasks.objects.filter(pingSecurityStatus='warning', uid=uid).count()


@api.route('/get_plan', methods=['GET'])
def get_plan():
    """Return the current user's plan name, job quotas, and expiry status."""
    uid = g.user.id
    balance = Balances.objects.filter(uid=uid).first()
    plan_name = Packages.objects.get(pk=balance.planId).name

    expire_iso = ""
    warning_icon = False
    if raw_expire := balance.planExpireTime:
        expire_at = arrow.get(raw_expire)
        # Flag the UI warning icon once the plan has expired.
        warning_icon = expire_at <= arrow.now()
        expire_iso = expire_at.isoformat()

    return jsonify({
        'code': 200,
        'msg': 'ok',
        'results': {
            'version': plan_name,
            'jobs_count': balance.balance.domain.total,
            'host_job_count': balance.balance.host.total,
            # 'monitor_count': 3,
            'expire_time': expire_iso,
            'warning_icon': warning_icon,
        },
    })


@api.route('/get_top_jobs', methods=['GET'])
@cache.cached(make_cache_key=make_cache_key_uid, timeout=5)
def get_top_jobs():
    """Top 5 highest-risk jobs for the ``task_type`` query parameter.

    ``task_type`` may be a concrete task type (ranked by warning count),
    ``'all'`` (ranked by the job's stored overall security score), or
    hostVul (host vulnerability warnings). Cached per-uid for 5 seconds.
    """
    request_args = request.args
    task_type = request_args.get('task_type')
    uid = g.user.id

    reverse = True

    data = []
    if task_type == TaskType.hostVul.value:
        # Host-vulnerability view: score each job's warning list.
        _total, job_warning_maps = get_host_vul_warnings(uid)
        for job_id, item in job_warning_maps.items():
            if not item['has_task']:
                continue
            score = Score.score_info(event_map={}, warnings=item.get("warnings", []))
            data.append({
                '_id': str(job_id),
                'job_id': str(job_id),
                'note': item.get("note"),
                'target': item.get("targetUrl"),
                'data': score,
                'task': item.get('task'),
            })
    else:
        if task_type != 'all':
            job_query = {"verification.isVerified": True, "assetType": {"$ne": "host"}}
            if Job.objects.filter(uid=uid, verification__isVerified=True, assetType__ne='host').count() > 100:
                # Large accounts: pre-select the 5 jobs with the highest
                # warning counts so LastTask doesn't scan everything.
                key = f'lastResult.{TASK_TYPE_CONFIG[task_type]["line"]}.count'
                job_ids = set()
                for asset_task in AssetTasks.objects.only(*[key, 'jobId']).filter(uid=uid).order_by(f'-{key}'):
                    job_ids.add(asset_task['jobId'])
                    if len(job_ids) == 5:
                        break
                job_query['_id'] = {'$in': list(job_ids)}
            data = LastTask(
                uid=uid, task_type_list=[task_type], job_query=job_query, only_score=True).get_warnings_group_task()
            for item in data:
                item['_id'] = str(item['_id'])
                item['job_id'] = str(item['job_id'])
                warnings = item.pop('warnings', [])
                if task_type in WS_TASK_GROUP_TUPLE:
                    # Bug fix: getattr() without a default raises
                    # AttributeError when no parser exists for the task
                    # type, so the `if parse_func` fallback below could
                    # never trigger. Default to None instead.
                    parse_func = getattr(Score, f"_parse_{TASK_TYPE_CONFIG[task_type]['line']}_warnings", None)
                    item['data'] = parse_func(warnings) if parse_func else {}
                else:
                    item['data'] = Score.score_info(event_map={}, warnings=warnings)
        else:
            # 'all': rank directly on the jobs' stored overall scores.
            jobs = list(Job._get_collection().find({"uid": uid, "verification.isVerified": True,
                                                    "assetType": {"$ne": "host"},
                                                    "securityStatus.score": {"$gt": 0}}
                                                   ).sort([("securityStatus.score", -1)]).limit(5))
            data = []
            default_score = {"level": "未知", "score": 0}
            for job in jobs:
                job_id = str(job.get("_id"))
                data.append({"data": job.get("securityStatus", default_score), "endTime": "", "job_id": job_id,
                             "_id": job_id, "note": job.get("note", ""), "sourceIp": job.get("sourceIp", ""),
                             "taskType": "", "target": job.get("targetUrl", "")})

    data = list(data)
    # 'all' ranks by aggregate score; everything else by warning count.
    if task_type == 'all':
        data.sort(key=lambda x: x['data']['score'], reverse=reverse)
    else:
        data.sort(key=lambda x: x['data']['count'], reverse=reverse)

    return jsonify({'code': 200, 'msg': 'ok', 'results': data[:5]})


@api.route('/get_top_ava/<string:task_type>', methods=['GET'])
@cache.cached(make_cache_key=make_cache_key_uid, timeout=5)
def get_top_ava(task_type):
    """Abnormal availability results for an 'http' or 'ping' task type.

    Any other task type yields an empty results object.
    """
    results = (
        {task_type: get_ava_results(task_type, abnormal=True)}
        if task_type in ('http', 'ping')
        else {}
    )
    return jsonify({'code': 200, 'msg': 'ok', 'results': results})


@api.route('/monitor-center')
@cache.cached(make_cache_key=make_cache_key_uid, timeout=5)
def monitor_center():
    """Account summary plus geo coordinates of the user's job source IPs."""
    uid = g.user.id

    user_jobs = Job.objects.filter(uid=uid)
    total_jobs = user_jobs.count()
    risk_jobs = user_jobs.filter(securityStatus__score__gt=0).count()

    region_dict = {}
    for entry in user_jobs.filter(sourceIp__ne="").only("sourceIp"):
        source_ip = entry.sourceIp
        # Skip empty or non-IP source addresses.
        if not source_ip or not re.match(IP_RE, source_ip):
            continue
        region = IP_ADDRESS_DB.get_address_obj(source_ip, 'CN')
        if not region:
            continue
        longitude = region.get('longitude')
        latitude = region.get('latitude')
        # Need at least one coordinate to place the point on the map.
        if not longitude and not latitude:
            continue
        # NOTE(review): the per-region total_jobs/risk_jobs counters are
        # initialized but never incremented, so they always stay 0 —
        # confirm whether the frontend relies on them.
        region_dict.setdefault(
            f"{longitude}_{latitude}",
            {'total_jobs': 0, 'risk_jobs': 0, 'source_ip': source_ip},
        )

    update_time = ''
    latest_task = Task.objects.filter(uid=uid).order_by('-endTime').only("endTime").first()
    if latest_task:
        update_time = arrow.get(latest_task.endTime).isoformat()

    results = {
        'account': {
            'name': g.user.username,
            'days': (arrow.now() - arrow.get(g.user.createAt)).days,
            'total_jobs': total_jobs,
            'risk_jobs': risk_jobs,
        },
        'region': region_dict,
        'update_time': update_time,
    }
    return jsonify({'code': 200, 'msg': 'ok', 'results': results})


@api.route('/seebug/get')
@cache.cached(make_cache_key=make_cache_key_global, timeout=600)
def get_seebug():
    """Seebug vulnerability statistics (total / has_poc counts).

    Falls back to the last Redis-cached payload when the upstream SeeBug
    service call fails. Cached globally for 10 minutes.
    """
    url = "/get_vul_statistics"
    try:
        seebug_data = SeeBug().get(url)
    except Exception:
        # Best-effort fallback to the last cached payload.
        # NOTE(review): redis.get() returns None when the key is missing,
        # which would raise AttributeError here — confirm the cache key is
        # always warmed before the upstream can fail.
        seebug_data = json.loads(redis.get(f"seebug_cache_{url}").decode())
    # Bug fix: a trailing comma previously made `results` a 1-tuple wrapping
    # the dict, so the JSON 'results' field was a list instead of an object.
    results = {'total': seebug_data.get('total', 0), 'has_poc': seebug_data.get('has_poc', 0)}

    return jsonify({'code': 200, 'msg': 'ok', 'results': results})


@api.route('/get_vulndb')
@cache.cached(timeout=600, make_cache_key=make_cache_key_global)
def get_vulndb():
    """Vulnerability-KB summary: total count, up to 10 recent distinct
    names, and the newest update timestamp. Cached globally for 10 minutes.
    """
    origin_data = KbVuln.objects.find(
        {'source': {'$in': ['kscan', 'nscan', 'cve', 'nuclei']}, 'is_deleted': False}
    ).order_by('-updated_at').paginate(1, 100)
    count = origin_data.total

    name_list = []
    update_at = ''
    for item in origin_data.items:
        name_list.append(item.name.zh_cn)
        # Items are sorted newest-first, so the first one carries the most
        # recent update time.
        if not update_at:
            update_at = arrow.get(item.updated_at).isoformat()

    # Bug fix: tuple(set(...)) picked 10 names in arbitrary hash order,
    # making the response nondeterministic. dict.fromkeys dedupes while
    # preserving the newest-first order of the query.
    results = {
        'count': count,
        'names': tuple(dict.fromkeys(name_list))[:10],
        'update_at': update_at
    }
    return jsonify({'code': 200, 'msg': 'ok', 'results': results})


@api.route('/keyword_frequencies')
@cache.cached(timeout=5, make_cache_key=make_cache_key_uid)
def keyword_frequencies():
    """Frequency of sensitive-word hits in the user's latest content tasks.

    Bug fix: the results are filtered by the current user's uid, but the
    endpoint was cached with the *global* cache key, so one user's data
    could be served to every user for the cache window. Cache per-uid,
    consistent with the other user-scoped endpoints in this file.
    """
    uid = g.user.id
    pipeline = [
        {'$match': {'taskType': 'content', 'uid': uid, 'isLatestTask': True}},
        {'$unwind': {'path': '$result.warnings'}},
        {'$limit': 100000},  # cap at 100k rows; keeps the query to roughly 3-5s
        {'$unwind': {'path': '$result.warnings.addition.results'}},
        {'$match': {'result.warnings.title': '敏感词'}},
        {'$group': {'_id': '$result.warnings.addition.results.word', 'count': {'$sum': 1}}}
    ]
    ret = {}
    for row in Task.objects.aggregate(pipeline):
        ret[row['_id']] = row['count']
    return jsonify(ret)
