"""资产复杂逻辑拆分 node2py"""
import datetime
import re
import requests

import ipaddress
from bson import ObjectId
from flask import request

from app.db.models.tasks import Task, TaskSpiderUrl, ChangeCheckBasepage
from app.db.models.jobs import Job, ProxyIpResult
from app.db.models.alert_groups import AlertGroup
from app.db.models.balances import Balances
from app.db.models.assets import Asset
from app.db.models.alerts import Alert
from app.db.models.packages import Packages
from app.db.models.users import User
from app.db.models.asset_tasks import AssetTasks
from app.db.models.siteportraits import SitePortRaits, SitePortRaitTask
from app.db.models.tasks import IgnoreWarning
from app.config.settings import SERVICE_API, YUNAQ_DOMAIN
from app.log.logs import UJobLog, DAssetLog, CAssetLog, UAssetLog
from app.handler.v2_jobs import JobVerifyHelper
from app.libs import scanner
from app.libs.enums import (
    SECURITY_EVENT_PLUGIN_TUPLE, CONTENT_PLUGIN_TUPLE,
    TASK_TYPE_CONFIG, CHANGE_CHECK_TYPE_TUPLE,
    WS_TASK_GROUP_TUPLE, AVAILABLE_TASK_GROUP_TUPLE,
    TaskType, AssetTaskGroup, TaskTriggerType
)
from app.libs.ip import check_source_ip
from app.libs.utility import get_default_area
from app.libs.utils import balance_task_count_sync, enable_task_group
from app.libs.regexp import IP_RE, IPV6_RE, NAME_RE
from app.libs.score import Score
from app.validators.asset import TargetUrl


def asset_with_queue(results):
    """Annotate job result dicts with asset settings and queue membership.

    Mirrors the original node.js logic: a row is "in the task queue" when
    either its asset id or its own job id has a waiting/active Task.
    """
    asset_ids = [r['assetId'] for r in results if r.get('assetId')]
    job_ids = [r['_id'] for r in results]

    assets_by_id = {a.id: a.to_mongo().to_dict() for a in Asset.objects.filter(pk__in=asset_ids)}
    queued_refs = Task.objects.find({
        'refId': {'$in': asset_ids + job_ids},
        'status': {'$in': ['waiting', 'active']}
    }).distinct('refId')

    for item in results:
        aid = item.get('assetId')
        if not aid:
            continue
        item['assetSettings'] = assets_by_id.get(aid, {}).get('taskSettings', {})
        item['inTaskQueue'] = aid in queued_refs or item['_id'] in queued_refs
    return results


def get_task_check_status(settings):
    """Map a task-settings dict to a status code.

    Returns 0 when no task was created (no taskId), 1 when a task exists
    and is enabled, 2 when a task exists but is disabled.
    """
    has_task = bool(settings.get("taskId", ""))
    if has_task:
        return 1 if settings.get("enable", False) else 2
    return 0


def web_asset_list_detail(results):
    """Build the web-asset list view.

    Joins each raw job dict in ``results`` with its asset / asset_tasks
    documents, attaches per-task-type result ids, result-task existence
    flags, task-enabled codes and last monitoring results, and returns a
    list of display dicts.
    """
    job_ids = [r['_id'] for r in results]
    query = {'_id': {'$in': job_ids}}
    project = {'asset': 1, 'asset_task': 1}
    # $lookup joins: the job's asset document and all of its asset_tasks
    aggregate = [
        {'$match': query},
        {'$lookup': {'from': "assets", 'localField': f"assetId", 'foreignField': "_id", 'as': 'asset'}},
        {'$lookup': {'from': "asset_tasks", 'localField': f"_id", 'foreignField': "jobId", 'as': 'asset_task'}},
        {'$project': project}
    ]
    job_status = {job['_id']: job for job in Job.objects.aggregate(aggregate)}
    # Chinese display labels for IPv6 support; any other value renders "未知"
    job_ipv6_cn_maps = {"support": "支持", "nonsupport": "不支持"}
    # jobs that have a scheduled web_security task group
    web_security_schedule_q = {"jobId": {"$in": job_ids}, "taskGroupType": "web_security", "triggerType": "schedule"}
    job_web_security_schedule_map = {at.jobId: True for at in AssetTasks.objects.find(web_security_schedule_q).only("jobId")}
    # collect result id(s) per task type across all rows
    task_type_id_maps = {t: [] for t in WS_TASK_GROUP_TUPLE + AVAILABLE_TASK_GROUP_TUPLE + (TaskType.asset.value, )}
    for j in results:
        for t in WS_TASK_GROUP_TUPLE:
            if _id := j.get(f"{t}ResultId"):
                task_type_id_maps[t].append(_id)
        for t in AVAILABLE_TASK_GROUP_TUPLE:
            # availability groups (http/ping) hold a list of result ids
            if _ids := j.get(f"{t}ResultId"):
                task_type_id_maps[t].extend([_ for _ in _ids if _])
    for k, v in job_status.items():
        if v.get("asset", []) and v["asset"][0].get("resultId"):
            task_type_id_maps[TaskType.asset.value].append(v["asset"][0]["resultId"])
    # result id -> last monitoring result; for security_event the value is a
    # (line result, severity breakdown) tuple
    result_map = {}
    for t in WS_TASK_GROUP_TUPLE:
        if t != TaskType.security_event.value:
            result_map.update({
                getattr(at, f"{t}ResultId"): at.lastResult.get(TASK_TYPE_CONFIG[t]["line"], {})
                for at in AssetTasks.objects.find(
                    {f"{t}ResultId": {"$in": task_type_id_maps[t]}}
                ).only(f"{t}ResultId", "lastResult") if getattr(at, "lastResult")})
        else:
            result_map.update({
                at.securityEventResultId: (
                    at.lastResult.get(TASK_TYPE_CONFIG[t]["line"], {}), at.lastResult.get("security_event_severity", {})
                ) for at in AssetTasks.objects.find(
                    {f"{t}ResultId": {"$in": task_type_id_maps[t]}}
                ).only(f"{t}ResultId", "lastResult") if getattr(at, "lastResult")})
    task_ids = []
    for ids in task_type_id_maps.values():
        task_ids.extend(ids)
    # which referenced result tasks still exist in the tasks collection
    exists_tasks = Task.objects.filter(id__in=task_ids).distinct("_id")
    task_status = {_: _ in exists_tasks for _ in task_ids}
    data = []
    for result in results:
        result_status = job_status.get(result['_id'])
        security_status = result.get("securityStatus", {})

        http_result_ids = result.get("httpResultId", [])
        if None in http_result_ids:
            http_result_ids.remove(None)

        ping_result_ids = result.get("pingResultId", [])
        if None in ping_result_ids:
            ping_result_ids.remove(None)
        task_info = {}
        for t in WS_TASK_GROUP_TUPLE:
            _id = result.get(f"{t}ResultId", "")
            task_info.update({f"{TASK_TYPE_CONFIG[t]['line']}_result_id": _id})
            task_info.update({f"{TASK_TYPE_CONFIG[t]['line']}_result_status": task_status.get(_id, False)})
            # 0 = no task created; raised later via get_task_check_status().
            # NOTE(review): the original comment here said "1 = disabled,
            # 2 = enabled", the reverse of get_task_check_status's own
            # mapping -- verify which is correct.
            task_info.update({f"{TASK_TYPE_CONFIG[t]['line']}_task_checked": 0})
        for t in AVAILABLE_TASK_GROUP_TUPLE:
            result_ids = [_ for _ in result.get(f"{t}ResultId", []) if _]
            task_info.update({f"{t}_result_id": result_ids})
            task_info.update({f'{t}_result_status': [{str(_): task_status.get(_, False)} for _ in result_ids]})
            task_info.update({f"{TASK_TYPE_CONFIG[t]['line']}_task_checked": 0})
        task_info.update({
            'asset_result_id': "",
            'asset_result_status': False,
            'asset_task_checked': 0,
        })

        date_item = {
            "uid": result.get('uid'),
            "note": result.get('note'),
            "target_url": result.get('targetUrl'),
            "job_id": result.get("_id"),
            "source_ip": result.get("sourceIp"),
            "level": security_status.get("level", "未知"),
            "score": security_status.get("score", 0),
            "update_time": security_status.get("update_time", ""),
            "alive_status": "normal",
            "is_verified": result.get('verification', {}).get('isVerified', False),
            "challenge": result.get('verification', {}).get('challenge', ''),
            "is_checked": True if security_status.get("update_time", "") else False,
            "asset_checked": True,
            "enable_monitor": result.get("enableMonitor"),
            "enable_alert": result.get("enableAlert"),
            "create_time": result.get("createTime"),
            "is_support_ipv6": job_ipv6_cn_maps.get(result.get('isSupportIpv6'), "未知"),
            "defense_target_id": str(result.get("defense_target_id", "")),
            "task": task_info,
            "package": "",
            "username": "",
            "last_result": {},
            "web_security_schedule_task": job_web_security_schedule_map.get(result["_id"], False),
            "enable_proxy_ip": result.get("enable_proxy_ip", False),
            "proxy_ip_result": result.get("proxy_ip_result") or [],
        }
        # attach the latest monitoring result per web-security task type
        last_result = {}
        for task_type in WS_TASK_GROUP_TUPLE:
            if not (res_id := result.get(f"{task_type}ResultId")):
                continue
            if not (res_monitor := result_map.get(res_id, {})):
                continue
            if task_type == TaskType.security_event.value:
                last_result.update({"security_event": res_monitor[0],  "security_event_severity": res_monitor[1]})
            else:
                last_result.update({TASK_TYPE_CONFIG[task_type]['line']: res_monitor})
        date_item['last_result'].update(last_result)
        if not result_status:
            data.append(date_item)
            continue
        # task groups handled outside the web_security group
        not_ws_task_group_map = {
            AssetTaskGroup.http.value: TaskType.http.value,
            AssetTaskGroup.web_asset.value: TaskType.asset.value,
            AssetTaskGroup.ping.value: TaskType.ping.value,
        }
        if result_status.get('asset_task'):
            asset_tasks = result_status.get('asset_task')
            for asset_task in asset_tasks:
                if asset_task.get("taskGroupType") == "web_security":
                    for task_type in WS_TASK_GROUP_TUPLE:
                        key = "{}_task_checked".format(TASK_TYPE_CONFIG[task_type]["line"])
                        skey = "{}Settings".format(task_type)
                        settings = asset_task.get(skey, {})
                        task_checked = get_task_check_status(settings)
                        # keep the highest status code across all asset_tasks
                        if task_checked > date_item['task'][key]:
                            date_item['task'][key] = task_checked
                elif asset_task.get("taskGroupType") in not_ws_task_group_map:
                    _task_type = not_ws_task_group_map[asset_task.get("taskGroupType")]
                    key = f"{_task_type}_task_checked"
                    skey = f"{_task_type}Settings"
                    settings = asset_task.get(skey, {})
                    task_checked = get_task_check_status(settings)
                    if task_checked > date_item['task'][key]:
                        date_item['task'][key] = task_checked
        if result_status.get("asset") and result_status.get("asset")[0].get("resultId"):
            asset_result_id = result_status.get("asset")[0].get("resultId")
            date_item['task']['asset_result_id'] = asset_result_id
            date_item['task']['asset_result_status'] = task_status.get(asset_result_id, False)
        data.append(date_item)
    return data


def host_asset_list_detail(results):
    """Build the host-asset list view.

    Joins each raw job dict with its asset_tasks, looks up the latest
    completed ``hostVul`` task result per job, and returns display dicts.
    """
    project = {'asset_task': 1}
    job_ids = [r['_id'] for r in results]
    query = {'_id': {'$in': job_ids}}

    # $lookup joins: the job's asset document and all of its asset_tasks
    aggregate = [
        {'$match': query},
        {'$lookup': {'from': "assets", 'localField': f"assetId", 'foreignField': "_id", 'as': 'asset'}},
        {'$lookup': {'from': "asset_tasks", 'localField': f"_id", 'foreignField': "jobId", 'as': 'asset_task'}},
        {'$project': project}
    ]
    job_status = {job['_id']: job for job in Job.objects.aggregate(aggregate)}
    # jobs that have a scheduled host task group
    host_schedule_q = {"jobId": {"$in": job_ids}, "taskGroupType": "host", "triggerType": "schedule"}
    job_host_schedule_map = {at.jobId: True for at in AssetTasks.objects.find(host_schedule_q).only("jobId")}
    # latest completed hostVul task per job: sort by endTime desc, keep one
    task_aggregate = [
        {"$match": {"jobId": {"$in": job_ids}, "taskType": "hostVul", "status": "completed"}},
        {"$project": {"_id": 1, 'endTime': 1, 'jobId': 1}},
        {"$sort": {"endTime": -1}},
        {"$group": {"_id": "$jobId", "tasks": {"$push": "$$ROOT"}}},
        {"$project": {"_id": 0, "tasks": {"$slice": ["$tasks", 1]}}}
    ]
    res_task_ids = [t.get("tasks")[0].get("_id") for t in Task._get_collection().aggregate(task_aggregate)]
    # job id -> last host_vul monitoring result snippet
    job_res_map = {at.jobId: at.lastResult.get("host_vul", {}) for at in AssetTasks.objects.find(
        {"hostVulResultId": {"$in": res_task_ids}}).only("jobId", "lastResult") if getattr(at, "lastResult")}
    data = []
    for result in results:
        result_status = job_status.get(result['_id'], {})
        securityStatus = result.get("securityStatus", {})
        date_item = {
            "uid": result.get('uid'),
            "note": result.get('note'),
            "target_url": result.get('targetUrl'),
            "job_id": result.get("_id"),
            "source_ip": result.get("sourceIp"),
            "score": securityStatus.get("score", 0),
            "level": securityStatus.get("level", "未知"),
            "update_time": securityStatus.get("update_time", ""),
            "alive_status": "normal",
            "is_verified": result.get('verification', {}).get('isVerified', False),
            "is_checked": False,
            "enable_monitor": result.get("enableMonitor"),
            "create_time": result.get("createTime"),
            "task": {
                "host_vul_task_checked": 0,
            },
            "package": "",
            "username": "",
            "last_result": job_res_map.get(result["_id"], {}),
            "host_schedule_task": job_host_schedule_map.get(result["_id"], False)
        }

        if not result_status:
            data.append(date_item)
            continue
        if result_status.get('asset_task'):
            asset_tasks = result_status.get('asset_task')
            for asset_task in asset_tasks:
                key = "host_vul_task_checked"
                skey = "hostVulSettings"
                settings = asset_task.get(skey, {})
                task_checked = get_task_check_status(settings)
                # keep the highest status code across all asset_tasks
                if task_checked > date_item['task'][key]:
                    date_item['task'][key] = task_checked
        date_item['is_checked'] = True if result_status else False
        data.append(date_item)

    return data


def query_asset_list(obj, query, resp_fields, page=None, count=None, sort=None, monitor_type=None, task_level=None):
    """Query the asset list with optional severity/plugin filtering and paging.

    ``obj`` is a Job-style document manager. ``monitor_type`` selects which
    task type's results drive the filter; ``task_level`` is a severity level
    number, a plugin/change-check type, or "0" for "no findings".
    Returns ``(results, total)``.
    """
    asset_type = query.get('assetType', 'web')
    result_monitor_type = monitor_type
    if TASK_TYPE_CONFIG.get(monitor_type):
        result_monitor_type = TASK_TYPE_CONFIG[monitor_type]["line"]
    if Score.check_level_num(task_level):
        # severity filter: map the level to its score range and select jobs
        # whose last result score falls inside it
        result_ids = [getattr(o, f"{monitor_type}ResultId") for o in obj.find(query).only("id", f"{monitor_type}ResultId")
                      if getattr(o, f"{monitor_type}ResultId")]
        score_range = Score.severityMap[int(task_level)]["score_range"]
        asset_task_q = {
            f"{monitor_type}ResultId": {"$in": result_ids},
            f"lastResult.{result_monitor_type}.score": {"$gte": score_range[0], "$lte": score_range[1]}
        }
        if task_level == "1":
            # level "1" additionally requires a recorded "low" finding field
            asset_task_q[f"lastResult.{result_monitor_type}.low"] = {"$exists": 1}
        job_ids = [at.jobId for at in AssetTasks.objects.find(asset_task_q).only("jobId")]
        query = {"_id": {"$in": job_ids}}
    elif task_level in SECURITY_EVENT_PLUGIN_TUPLE + CONTENT_PLUGIN_TUPLE + CHANGE_CHECK_TYPE_TUPLE:
        # plugin / change-check filter: positive finding count of that type
        result_ids = [getattr(o, f"{monitor_type}ResultId") for o in obj.find(query).only("id", f"{monitor_type}ResultId")
                      if getattr(o, f"{monitor_type}ResultId")]
        asset_task_q = {
            f"{monitor_type}ResultId": {"$in": result_ids},
            f"lastResult.{result_monitor_type}.{task_level}": {"$gt": 0}
        }
        job_ids = [at.jobId for at in AssetTasks.objects.find(asset_task_q).only("jobId")]
        query = {"_id": {"$in": job_ids}}
    elif str(task_level) == "0":
        # "0" = jobs with no findings: start from all matched jobs, then
        # subtract those with good results, warnings, or a positive score
        job_task_map_list = [{o.id: getattr(o, f"{monitor_type}ResultId")} for o in obj.find(query).only("id", f"{monitor_type}ResultId")]
        job_ids = []
        task_ids = []
        for item in job_task_map_list:
            for jid, tid in item.items():
                job_ids.append(jid)
                if tid:
                    task_ids.append(tid)
        has_result_job_ids = Task.objects.find({
            "_id": {"$in": task_ids},
            "$or": [
                {"result.targetStatus.status": {"$exists": 0}},
                {"result.targetStatus.status": "good"}
            ]
        }).distinct("jobId")
        warn_result_job_ids = Task.objects.find({
            "_id": {"$in": task_ids}, "securityStatus": "warning"
        }).distinct("jobId")
        warn_asset_tasks_job_ids = AssetTasks.objects.find({
            f"{monitor_type}ResultId": {"$in": task_ids},
            f"lastResult.{result_monitor_type}.score": {"$gt": 0}
        }).distinct('jobId')
        job_ids = list(set(job_ids) - set(has_result_job_ids) - set(warn_result_job_ids) - set(warn_asset_tasks_job_ids))
        query = {"_id": {"$in": job_ids}}
    data = obj.find(query)
    total = data.count()  # total row count before paging
    if sort:
        data = data.order_by(*sort)
    if resp_fields:
        data = data.only(*resp_fields)
    if page and count:
        # out-of-range page: return an empty page but still the real total
        if (page - 1) * count >= total:
            return [], total
        data = data.paginate(page, count)
        results = list(data.items.as_pymongo())
    else:
        results = [item.to_mongo().to_dict() for item in data]
    if asset_type == 'host':
        results = host_asset_list_detail(results)
    else:
        results = web_asset_list_detail(results)

    return results, total


def get_user_packages(uid_list):
    """Resolve usernames and package (plan) names for the given user ids.

    Returns a tuple ``(user_maps, package_maps, user_package_maps)``:
    uid -> username, plan id -> package name, uid -> plan id.
    """
    user_maps = {}
    user_package_maps = {}
    for user in User.objects.find({'_id': {"$in": uid_list}}):
        user_maps[user.pk] = user.username
        user_package_maps[user.pk] = user.planId
    plan_ids = list(user_package_maps.values())
    package_maps = {
        package.pk: package.name
        for package in Packages.objects.find({"_id": {"$in": plan_ids}})
    }
    return user_maps, package_maps, user_package_maps


def param_prepare(data):
    """Normalize a validated URL payload into job/asset parameter fields."""
    url = data['url']
    protocol = f"{url.scheme}:"
    hostname = url.host
    port = url.port
    host = f"{hostname}:{port}" if port else hostname
    if not port:
        # default port follows the scheme
        port = 80 if protocol == 'http:' else 443

    # ipType 4 marks a literal IP target (v4 or v6 regex match), 0 a domain
    is_ip_literal = bool(re.match(IP_RE, hostname) or re.match(IPV6_RE, hostname))
    ipType = 4 if is_ip_literal else 0

    # eTLD+1 approximation: last two dot-separated labels,
    # e.g. a.b.c.d.e.com -> e.com; not meaningful for IP literals
    eTLD1 = None if is_ip_literal else '.'.join(hostname.split('.')[-2:])

    return {
        "protocol": protocol,
        "host": host,
        "hostname": hostname,
        "port": int(port),
        "ipType": ipType,
        "eTLD1": eTLD1,
        'note': data['note'],
        'targetUrl': f'{protocol}//{host}',
        'sourceIp': data['sourceIp']
    }


def host_asset_param_prepare(data):
    """Validate a host-asset payload and build normalized job fields.

    Only IPv4 addresses are accepted as host assets. Returns
    ``(True, params)`` on success, ``(False, {})`` when
    ``data['sourceIp']`` is not a valid IPv4 address (including IPv6).
    """
    host = data['sourceIp']
    try:
        # validation only -- the parsed address object itself is unused,
        # so the previous `ip_obj = ...` binding was dropped
        ipaddress.IPv4Address(host)
    except (ipaddress.AddressValueError, ipaddress.NetmaskValueError):
        return False, {}
    return True, {
        "protocol": 'http:',
        "host": host,
        "hostname": host,  # for hosts, hostname is the bare IP
        "port": None,
        "ipType": 4,
        "eTLD1": None,  # no registrable domain for an IP literal
        'note': data['note'],
        'targetUrl': host,
        'sourceIp': host
    }


def yunaq(domain, username):
    """Return True when the domain is protected by the Yunaq cloud defense.

    Best-effort remote lookup: any transport failure is treated as "not
    protected" rather than raised to the caller.
    """
    headers = {
        'Authorization': f'Basic {YUNAQ_DOMAIN["credentials"]}',
        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36'
    }
    try:
        resp = requests.get(
            url=YUNAQ_DOMAIN['api'],
            params={'domain': domain, 'username': username},
            headers=headers,
            # BUG FIX: no timeout could hang the request thread indefinitely
            timeout=10,
        )
    except requests.RequestException:
        # BUG FIX: previously only ConnectionError was caught, so Timeout
        # and other transport errors propagated to the caller
        return False
    if resp.status_code == 200:
        resp_json = resp.json()
        if resp_json.get('status') == 'success' and resp_json.get('msg') == 'success':
            return True
    return False


def _add_job(data, uid, job_id, asset_id, site_portrait_id, alert_group_id, username, asset_type='web',
             is_verified=False, enable_monitor=False, enable_alert=False):
    """Create and persist the Job document backing a new asset.

    ``data`` is the dict produced by param_prepare / host_asset_param_prepare.
    Web jobs get default http/ping monitoring targets and probe areas; host
    jobs target the bare source IP with SSL checks disabled. A successful
    Yunaq lookup auto-verifies the job.
    """
    job_json = {
        'target': {
            "protocol": data['protocol'],
            "host": data['host'],
            "hostname": data['hostname'],
            "port": data['port'],
            "ipType": data['ipType'],
            "eTLD1": data['eTLD1']
        },
        'note': data['note'],
        'targetUrl': data['targetUrl'],
        'sourceIp': data['sourceIp'],
        'id': job_id,
        'uid': uid,
        'sitePortraitId': site_portrait_id,
        'assetType': asset_type,
    }
    # host assets pass asset_id='' -- only web assets link an Asset document
    if asset_id:
        job_json['assetId'] = asset_id

    job = Job(**job_json)

    if asset_type == 'web':
        default_target = f"{data['targetUrl']}/"
        default_area = get_default_area(data['targetUrl'], data['sourceIp'])

        job.httpSettings[0].target = default_target
        job.httpSettings[0].sourceIp = data['sourceIp']
        job.httpSettings[0].collect.area = default_area['area']
        job.httpSettings[0].collect.area_ipv6 = default_area['area_ipv6']

        job.pingSettings[0].target = default_target
        job.pingSettings[0].sourceIp = data['sourceIp']
        job.pingSettings[0].collect.area = default_area['area']
        job.pingSettings[0].collect.area_ipv6 = default_area['area_ipv6']

        # a plain-http site has no certificate to monitor
        if data['protocol'] == 'http:':
            job.sslSettings.enable = False
    else:
        # host asset: probe the raw IP, no probe areas, no SSL checks
        job.httpSettings[0].target = data['sourceIp']
        job.httpSettings[0].sourceIp = data['sourceIp']
        job.httpSettings[0].collect.area = []
        job.httpSettings[0].collect.area_ipv6 = []

        job.pingSettings[0].target = data['sourceIp']
        job.pingSettings[0].sourceIp = data['sourceIp']
        job.pingSettings[0].collect.area = []
        job.pingSettings[0].collect.area_ipv6 = []

        job.sslSettings.enable = False

    job.alertSettings['alertGroupIds'] = [alert_group_id]
    job.enableMonitor = enable_monitor
    job.enableAlert = enable_alert

    # domains already protected by Yunaq count as verified automatically
    is_yunaq = yunaq(domain=job.target.hostname, username=username)
    if is_yunaq:
        job.cydEnabled = True
        job.verification.isVerified = True
        job.verification.verifyType = 'yunaq'
    else:
        job.cydEnabled = False
        job.verification.isVerified = is_verified

    job.save()
    return job


def _add_asset(data, uid, alert_group_id, target, job_id, asset_id, target_type, asset_type='web'):
    """Create and persist an Asset document bound to the given job."""
    asset = Asset(
        uid=uid,
        target=data['host'],
        id=asset_id,
        assetType=asset_type,
    )
    asset.taskSettings.target = target
    asset.alertSettings.alertGroupIds = [alert_group_id]
    # ipType 0 means the target was a domain name; anything else an IP literal
    asset.targetType = 'ip' if target_type != 0 else 'domain'
    asset.jobId = job_id
    asset.save()
    return asset


def _update_balance(uid, key='inc__balance__domain__used', increment=1):
    """Apply a counter delta to a user's balance document.

    ``increment`` is -1 when an item is deleted.
    """
    queryset = Balances.objects(uid=uid)
    if increment < 0:
        # TODO: decrements should guard against the counter dropping below zero
        pass
    queryset.update_one(**{key: increment})


def add_asset(data, uid, username):
    """Create a web asset and its backing job for a user.

    Returns ``(True, job)`` on success or ``(False, reason)`` when a
    precondition fails (missing default alert group, duplicate asset,
    missing balance record, or exhausted quota).
    """
    alert_group = AlertGroup.objects.filter(uid=uid, isDefault=True).first()
    if not alert_group:
        return False, '未设置告警组'

    alert_group_id = str(alert_group.id)
    is_verified = data.get('is_verified', False)
    enable_monitor = data.get('enable_monitor', False)
    enable_alert = data.get('enable_alert', False)
    param = param_prepare(data)

    duplicate = Job.objects.find({'uid': uid, 'targetUrl': param['targetUrl'], 'sourceIp': param['sourceIp']})
    if duplicate:
        return False, '当前资产已添加'

    balance = Balances.objects.find_one({'uid': uid})
    if not balance:
        return False, '无关联余额信息'
    if balance.balance.domain.used >= balance.balance.domain.total:
        return False, '资产数量已到达套餐上限'

    asset_id, job_id, site_portrait_id = ObjectId(), ObjectId(), ObjectId()
    _add_asset(param, uid, alert_group_id, param['targetUrl'], job_id, asset_id,
               target_type=param['ipType'])
    job = _add_job(param, uid, job_id, asset_id, site_portrait_id, alert_group_id,
                   username=username, is_verified=is_verified,
                   enable_monitor=enable_monitor, enable_alert=enable_alert)
    balance_task_count_sync(uid, balance)
    # verified web assets also get a change-monitoring task right away
    if is_verified and job.assetType != 'host':
        JobVerifyHelper(job=job).enable_asset_task()
    return True, job


def add_host_asset(data, uid, username):
    """Create a host (IPv4) asset job for a user.

    Returns ``(True, job)`` on success or ``(False, reason)`` when a
    precondition fails.
    """
    alert_group = AlertGroup.objects.filter(uid=uid, isDefault=True).first()
    if not alert_group:
        return False, '未设置告警组'

    ok, param = host_asset_param_prepare(data)
    if not ok:
        return False, '当前主机资产只支持IPv4'

    asset_type = 'host'
    duplicate = Job.objects.find({'uid': uid, 'sourceIp': param['sourceIp'], 'assetType': asset_type})
    if duplicate:
        return False, '当前主机资产已添加'

    balance = Balances.objects.find_one({'uid': uid})
    if not balance:
        return False, '无关联余额信息'
    if balance.balance.host.used >= balance.balance.host.total:
        return False, '资产数量已到达套餐上限'

    job_id, site_portrait_id = ObjectId(), ObjectId()
    job = _add_job(param, uid, job_id, asset_id='', site_portrait_id=site_portrait_id,
                   alert_group_id=str(alert_group.id), username=username,
                   asset_type=asset_type, is_verified=data.get('is_verified', False))
    balance_task_count_sync(uid, balance)

    return True, job


def inner_api(api, *args, **kwargs):
    """POST to an internal service endpoint and return the raw response."""
    endpoint = SERVICE_API + api.lstrip('/')
    return requests.post(endpoint, *args, **kwargs)


def del_asset(uid, job_ids, api=False):
    """Delete assets (jobs) and all of their dependent records.

    Returns the number of deleted Job documents. ``api=True`` only changes
    the wording of the audit log entry.
    """
    jobs = Job.objects.filter(id__in=job_ids, uid=uid)
    if not jobs:
        return 0

    target_urls = [job.targetUrl for job in jobs]
    target_note_url = [f'资产名称: {job.note}, 资产地址: {job.targetUrl}' for job in jobs]

    # best-effort: remove related cloud-defense blacklist entries; failures
    # here must not block the deletion itself
    cookie = request.headers.environ.get('HTTP_COOKIE', '')
    for job in jobs:
        try:
            inner_api('/api/v2/cloud/udb_del_all', json={'job_id': str(job.pk)}, headers={'cookie': cookie})
        except Exception as e:
            pass

    AssetTasks.objects.filter(jobId__in=job_ids, uid=uid).delete()
    # remove scheduled scanner tasks before dropping the Task records
    task_ids = Task.objects.filter(jobId__in=job_ids, uid=uid).distinct('taskId')
    if task_ids:
        scanner.BatchRemoveTask(task_ids).batch_remove()

    Asset.objects.filter(jobId__in=job_ids, uid=uid).delete()
    Task.objects.filter(jobId__in=job_ids, uid=uid).delete()
    TaskSpiderUrl.objects.filter(jobId__in=job_ids, uid=uid).delete()
    ChangeCheckBasepage.objects.filter(job_id__in=job_ids, uid=uid).delete()
    Alert.objects.filter(jobId__in=job_ids, uid=uid).delete()
    jcount = Job.objects.filter(id__in=job_ids, uid=uid).delete()
    # site portraits are keyed by target URL: only drop those whose target
    # no longer belongs to any remaining job of this user
    exists_targets = Job.objects.filter(targetUrl__in=target_urls, uid=uid).distinct('targetUrl')

    delete_site_portraits = set(target_urls).difference(set(exists_targets))
    SitePortRaits.objects.filter(target__in=delete_site_portraits, uid=uid).delete()
    SitePortRaitTask.objects.filter(target__in=delete_site_portraits, uid=uid).delete()
    IgnoreWarning.objects.filter(jobId__in=job_ids).delete()

    if jcount:
        # re-sync balance counters and write the audit log entry
        balance = Balances.objects.find_one({'uid': uid})
        balance_task_count_sync(uid, balance)

        log_info = {
            'affects': target_urls,
            'actionMessage': '删除资产' if not api else "API删除资产",
            'detail': f'{"API" if api else ""}删除资产: {", ".join(target_note_url)}'
        }
        DAssetLog(**log_info).info(log_info['actionMessage'], log_info['detail'])

    return jcount


def switch_job_tasks(job, enable):
    """Enable or disable all scheduled monitoring tasks of an asset.

    Returns ``(ok, message)``. Enabling requires the asset to be verified
    and the owner's plan to be unexpired.
    """
    job.reload()
    if enable and not job.verification.isVerified:
        return False, "没有认证的资产不能开启监测任务"
    # only scheduled tasks whose switch actually changes state
    asset_tasks = AssetTasks.objects.filter(
        jobId=job.id, triggerType=TaskTriggerType.schedule.value, enableMonitor__ne=enable)
    if not asset_tasks:
        return False, f"资产 {job.targetUrl} 没有需要修改的监测任务"
    balance = Balances.objects.filter(uid=job.uid).first()
    is_error_balance = (not balance) or (not balance.planExpireTime) or (balance.planExpireTime < datetime.datetime.utcnow())
    if is_error_balance and enable:
        return False, "套餐已经过期, 不能开启监测任务"

    for at in asset_tasks:
        at.enableMonitor = enable
        # http/ping settings live both on the asset task and on the job's
        # matching settings entry (matched by target); keep them in sync
        if at.taskGroupType in [AssetTaskGroup.http.value, AssetTaskGroup.ping.value]:
            setting = getattr(at, f"{at.taskGroupType}Settings")
            setting.enable = enable
            if job_settings := getattr(job, f"{at.taskGroupType}Settings"):
                for s in job_settings:
                    if s.target == setting.target:
                        s.enable = enable
                        break
        at.save()
        # NOTE(review): job.save() and balance_task_count_sync() run once per
        # asset task; they look hoistable out of the loop -- confirm before
        # changing, the per-iteration order may be relied upon
        job.save()
        if enable:
            enable_task_group(asset_task=at, insert=True)
        else:
            enable_task_group(asset_task=at)
        balance_task_count_sync(job.uid, balance)
    return True, "更新成功"


def update_asset(uid, job_id, update, api=False):
    """Apply field updates to an asset's job and write an audit log entry.

    Returns ``(job, message)`` on success or ``(False, reason)``. Changing
    ``sourceIp`` de-verifies the asset and shuts off its monitoring tasks.
    """
    job = Job.objects.filter(uid=uid, id=job_id).first()
    if not job:
        return False, "资产不存在"

    # normalize sourceIp so None and "" compare as equal below
    if job.sourceIp is None:
        job.sourceIp = ""

    # keep only fields whose value actually changes
    new_update = {}
    for field in update:
        field_v = getattr(job, field, None)
        if field_v != update[field]:
            new_update[field] = update[field]

    # the open (secondary-development) API may not change the origin IP yet
    if 'sourceIp' in new_update and api:
        del new_update['sourceIp']
    elif 'sourceIp' in new_update:
        # reject duplicates: same target URL + same origin IP on another job
        other_job_query = {"uid": uid, "id__ne": job.id, "targetUrl": job.targetUrl}
        if not new_update["sourceIp"]:
            other_job_query.update({"sourceIp__in": [None, ""]})
        else:
            other_job_query.update({"sourceIp": new_update["sourceIp"]})
        if Job.objects.filter(**other_job_query).first():
            return False, "已经存在相同的资产地址和源站IP"
        # changing the origin IP resets the asset to unverified
        new_update['verification__isVerified'] = False
    if not new_update:
        return False, "没有任何更新"

    job.update(**new_update)
    # de-verified assets must have their monitoring tasks switched off
    if new_update.get('verification__isVerified') is False:
        switch_job_tasks(job, False)

    # NOTE: the in-memory job still holds pre-update values here (update()
    # does not reload the document), so the log records "old -> new"
    log_detail = f'{"API" if api else ""}更新资产，资产地址: {job.targetUrl}, '
    if 'note' in new_update:
        log_detail += f'资产名称:{job.note} -> {new_update["note"]}'
    if 'sourceIp' in new_update:
        log_detail += f'源站IP:{job.sourceIp} -> {new_update["sourceIp"]}'

    log_info = {
        'affects': [job.targetUrl],
        'actionMessage': '更新资产' if not api else "API更新资产",
        'detail': log_detail
    }
    UAssetLog(**log_info).info(log_info['actionMessage'], log_info['detail'])
    return job, "更新成功"


def update_asset_status(uid, jobId, update_field):
    """Toggle a job's monitor/alert switch and write an audit log entry.

    Returns True when the job exists and was updated, False otherwise.
    """
    job = Job.objects.filter(uid=uid, id=jobId).first()
    if not job:
        return False
    job.update(**update_field)

    if 'enableMonitor' in update_field:
        name, flag = '监测配置', update_field['enableMonitor']
    else:
        name, flag = '告警配置', update_field['enableAlert']
    action = '开启' if flag else '关闭'

    log_info = {
        'affects': [job.targetUrl],
        'actionMessage': '更新监测配置',
        'detail': f'更新资产{job.note}，{action} {name}'
    }
    UJobLog(**log_info).info(log_info['actionMessage'], log_info['detail'])
    return True


def config_single(uid, jobId, data):
    """Dispatch one-off (manual) tasks for every enabled task type.

    Each dispatched type also adjusts the per-type manual-task balance
    counter (delta -1) before the internal API call.
    """
    settings_key_by_type = {
        'asset': 'assetcollect',
        'content': 'contentcollect',
        'securityEvent': 'securityEventcollect',
        'ssl': 'sslcollect',
        'vul': 'vulcollect'
    }
    enabled_types = [k for k, v in data['alertSettings']['enable'].items() if v]
    cookie = request.headers.get('cookie')

    for task_type in enabled_types:
        _update_balance(uid, f'inc__balance__{task_type}ManualTask__used', increment=-1)
        payload = {
            'alertSettings': data['alertSettings'],
            'taskSettings': data[settings_key_by_type[task_type]],
            'jobId': str(jobId),
            'enableMonitor': True,
            'immediateExec': True
        }
        inner_api(f'/api/v2/jobs/{task_type}/manual', json=payload, headers={'cookie': cookie})


def config_period(uid, jobId, data):
    """Configure periodic (scheduled) monitoring tasks for an asset.

    For each enabled, supported task type: schedule the task via the inner
    jobs API, then persist any newly supplied per-type settings on the job.

    Bug fixes versus the previous revision:
    - the enable filter compared the enable *value* (a boolean) against
      ``task_types`` (``v in task_types`` — always False, so no task was
      ever scheduled); it must check the key, as ``config_single`` does.
    - ``taskSettings`` indexed ``data`` with the settings value itself
      (``data[task_type_setting]`` — TypeError/KeyError) instead of using
      the already-fetched value with a fallback.

    Args:
        uid: owner user id.
        jobId: asset (job) primary key.
        data: payload with 'alertSettings', optional enable flags and
            optional '<type>Settings' dicts.
    """
    task_types = (
        'asset',
        'content',
        'securityEvent',
        'ssl',
        'vul',
        'http',
        'ping',
    )
    job = Job.objects.find(pk=jobId, uid=uid).first()
    if not job:
        return

    if data.get('enableAlert') is not None:
        job.enableAlert = data['enableAlert']

    if data.get('enableMonitor') is not None:
        job.enableMonitor = data['enableMonitor']

    # keep only enabled task types that this endpoint actually supports
    taskTypes = [k for k, v in data['alertSettings']['enable'].items() if v and k in task_types]
    cookie = request.headers.get('cookie')

    for taskType in taskTypes:
        task_type_setting = data.get(f'{taskType}Settings')
        pre_task_type_setting = getattr(job, f'{taskType}Settings')

        req_json = {
            'alertSettings': data['alertSettings'],
            # fall back to the settings already stored on the job
            'taskSettings': task_type_setting or pre_task_type_setting.to_mongo().to_dict(),
            'jobId': str(jobId),
            'enableMonitor': True,
            'immediateExec': False,
            'cookie': cookie,
            'sourceIp': data.get('sourceIp') or job.sourceIp,
            'customMonitorPage': data.get('customMonitorPage') or job.customMonitorPage
        }
        inner_api(f'/api/v2/jobs/{taskType}/schedule', json=req_json, headers={'cookie': cookie})

        # persist newly supplied settings on the job (saved once below)
        if task_type_setting:
            setattr(job, f'{taskType}Settings', task_type_setting)
    job.save()


def async_batch_add_asset(data_list, asset_type):
    """Batch-create host or web assets and build an import report + op log.

    Each input row is validated (name format, target, user existence,
    source IP, URL format) and, if valid, handed to ``add_host_asset`` /
    ``add_asset``. Failures and successes are collected into a report
    shaped for the frontend import widget.

    Args:
        data_list: rows with note/target_url/source_ip/username/is_verified.
        asset_type: 'host' or 'web' (anything non-'host' is treated as web).

    Returns:
        (code, message, data): code is 100 if any row failed, else 0;
        ``data`` carries per-row failure details plus success/fail counters.
    """
    username_list = [str(item.get('username')) for item in data_list]
    users = User.objects.find({"username": {"$in": username_list}})
    user_maps = {user.username: user.pk for user in users}

    asset_type_cn = '主机资产' if asset_type == 'host' else 'Web资产'

    result = {'summary': {}, 'detail': [], 'success': []}

    def _record(bucket, note, target_url, source_ip, reason):
        # One report row per input row, for both failures and successes.
        result[bucket].append({"note": note, "target_url": target_url, "source_ip": source_ip,
                               "asset_type": asset_type_cn, "reason": reason})

    for data in data_list:
        note = str(data.get('note')) if data.get('note') else ""
        target_url = data.get('target_url')
        source_ip = data.get('source_ip') if data.get('source_ip') else ""
        username = str(data.get('username'))
        is_verified = data.get('is_verified', False)
        uid = user_maps.get(username)

        if asset_type == 'host':
            # for host assets the target itself is the source IP
            source_ip = target_url

        if (not note) or (not NAME_RE.fullmatch(note)):
            _record('detail', note, target_url, source_ip,
                    "名称格式错误，只能输入1～50个中文、英文字母、数字、下划线")
            continue
        if not target_url:
            _record('detail', note, target_url, source_ip, "资产地址不能为空")
            continue
        if not username:
            _record('detail', note, target_url, source_ip, "用户名不能为空")
            continue
        if not uid:
            _record('detail', note, target_url, source_ip, "用户不存在")
            continue

        # hosts always have a source IP; web rows only when one was given
        if asset_type == 'host' or source_ip:
            status, message = check_source_ip(source_ip)
            if not status:
                _record('detail', note, target_url, source_ip, message)
                continue

        web_target_url = None
        if asset_type == 'web':
            try:
                web_target_url = TargetUrl(url=target_url)
            except Exception:
                _record('detail', note, target_url, source_ip, "资产地址格式错误")
                continue

        if asset_type == 'host':
            temp_data = {'note': note, 'sourceIp': target_url, 'assetType': 'host', 'is_verified': is_verified}
            status, message = add_host_asset(temp_data, uid, username)
        else:
            temp_data = {'note': note, 'sourceIp': source_ip, 'url': web_target_url.url, 'is_verified': is_verified}
            status, message = add_asset(temp_data, uid, username)

        if status:
            _record('success', note, target_url, source_ip, "添加成功")
        else:
            _record('detail', note, target_url, source_ip, message)

    # mirror fields expected by the frontend import widget
    for item in result['detail']:
        item['status'] = 'error'
        item['value'] = item['target_url']
        item['message'] = item['reason']

    total = len(data_list)
    fail = len(result['detail'])
    code = 100 if fail else 0
    success = total - fail
    message = '资产添加失败' if fail else '资产添加成功'
    data = {
        "data": {
            "data": result['detail'],
            "static": {"failed": fail, "success": success, "total": total},
        },
        "message": message,
        "status": "error" if fail else "success",
    }

    # operation-log wording differs: hosts are identified by target_url,
    # web assets by source_ip
    if asset_type == 'host':
        opt_fail_log_details = [f'资产地址: {item["target_url"]}, 结果: {item["reason"]}' for item in result["detail"]]
        opt_success_log_details = [item.get("target_url") for item in result["success"]]
    else:
        opt_fail_log_details = [f'资产地址: {item["source_ip"]}, 结果: {item["reason"]}' for item in result["detail"]]
        opt_success_log_details = [item.get("source_ip") for item in result["success"]]

    detail = "批量添加{}, ".format(asset_type_cn)
    if opt_success_log_details:
        detail += "添加成功的资产地址: {}".format(",".join(opt_success_log_details))
    if opt_fail_log_details:
        detail += ", 添加失败的详情: {}".format("; ".join(opt_fail_log_details))

    log_info = {
        'affects': "",
        'actionMessage': '批量添加{}'.format(asset_type_cn),
        'detail': detail
    }
    CAssetLog(**log_info).info(log_info['actionMessage'], log_info['detail'])

    # `result` is not returned; drop the bulky success list regardless
    result.pop("success", None)

    return code, message, data


def admin_asset_list(query):
    """Admin asset list: build a Mongo filter from `query` and paginate.

    Supports filtering by owner username / package (resolved to uid lists),
    verification state, proxy IP, substring matches on note / target / IP,
    risk level, IPv6 support, creation-time range and monitor result type.

    Args:
        query: validated request params; must contain 'asset_type', 'page'
            and 'perPage', everything else optional.

    Returns:
        {'results': [...], 'count': total, 'pageInfo': {...}}
    """
    resp_fields = [
        'id', 'assetId', 'enableAlert', 'enableMonitor', 'note', 'sourceIp',
        'target', 'targetUrl', 'verification', 'uid', 'createTime',
        'securityStatus', 'isSupportIpv6', "enable_proxy_ip", "proxy_ip_result",
    ]
    resp_fields.extend(f"{t}ResultId" for t in WS_TASK_GROUP_TUPLE)

    username = query.get('username') or ''
    package = query.get('package') or ''

    user_id_list = []
    if username and package:
        # both filters: users whose name matches AND whose plan matches
        packages = Packages.objects.find({"name": {'$regex': re.escape(package)}})
        package_ids = {p.pk for p in packages}
        matched_users = User.objects.find({"username": {'$regex': re.escape(username)}})
        user_id_list = [u.pk for u in matched_users if u.planId in package_ids]
    elif username:
        matched_users = User.objects.find({"username": {'$regex': re.escape(username)}})
        user_id_list = [u.pk for u in matched_users]
    elif package:
        packages = Packages.objects.find({"name": {'$regex': re.escape(package)}})
        package_id_list = [p.pk for p in packages]
        matched_users = User.objects.find({"planId": {"$in": package_id_list}})
        user_id_list = [u.pk for u in matched_users]

    asset_type = query['asset_type']
    # 'host' matches exactly; anything else means "all non-host (web) assets"
    q = {'assetType': asset_type if asset_type == 'host' else {'$ne': 'host'}}

    if username or package:
        q['uid'] = {"$in": user_id_list}

    if query.get('is_verified') is not None:
        q['verification.isVerified'] = query.get('is_verified')

    if query.get('enable_proxy_ip'):
        q['enable_proxy_ip'] = True

    if proxy_ip := query.get("proxy_ip"):
        # escape like the other substring filters: IPs contain '.', and raw
        # metacharacters could make the pattern invalid or over-match
        q["proxy_ip_result.ip"] = {"$regex": re.escape(proxy_ip)}

    if query.get('note'):
        q['note'] = {'$regex': re.escape(query['note'])}
    if query.get('target_url'):
        q['targetUrl'] = {'$regex': re.escape(query['target_url'])}
    if query.get('source_ip'):
        q['sourceIp'] = {'$regex': re.escape(query['source_ip'])}
    if query.get('defense_target_id'):
        q['defense_target_id'] = {"$exists": True}
    if query.get('level'):
        q['securityStatus.level'] = query['level']

    if is_support_ipv6 := query.get('is_support_ipv6'):
        ipv6_state = {"未知": "unknown", "支持": "support", "不支持": "nonsupport"}.get(is_support_ipv6, "unknown")
        if ipv6_state == "unknown":
            # "unknown" = anything not explicitly marked support/nonsupport
            q['isSupportIpv6'] = {"$nin": ["support", "nonsupport"]}
        else:
            q['isSupportIpv6'] = ipv6_state

    start_time = None
    if query.get('start_time'):
        start_time = datetime.datetime.strptime(query['start_time'], "%Y-%m-%d %H:%M:%S")

    end_time = None
    if query.get('end_time'):
        end_time = datetime.datetime.strptime(query['end_time'], "%Y-%m-%d %H:%M:%S")

    if start_time and end_time:
        q['$and'] = [{'createTime': {'$gte': start_time}}, {'createTime': {'$lte': end_time}}]
    elif start_time:
        # NOTE(review): single-bound queries use exclusive $gt/$lt while the
        # combined range above is inclusive — looks unintentional, confirm
        # with product before unifying.
        q['createTime'] = {"$gt": start_time}
    elif end_time:
        q['createTime'] = {"$lt": end_time}

    monitor_type = query.get('monitor_type')
    task_level = query.get('task_level', None)
    if monitor_type and asset_type != "host" and Score.check_level_num(task_level):
        # only assets that actually have a result of this monitor type
        q[f"{monitor_type}ResultId"] = {"$exists": 1, "$nin": ["", None]}

    page, count = query['page'], query['perPage']
    sort = ['-id']
    results, total = query_asset_list(Job.objects, q, resp_fields, page, count, sort, monitor_type, task_level)

    # decorate rows with owner username and package name
    uid_list = [item.get('uid') for item in results]
    if uid_list:
        user_maps, package_maps, user_package_maps = get_user_packages(uid_list)
        for item in results:
            uid = item.get('uid')
            item['username'] = user_maps.get(uid)
            if package_id := user_package_maps.get(uid):
                item['package'] = package_maps.get(package_id)

    page_info = {
        'currentPage': page,
        'hasNextPage': page * count < total,
        'hasPreviousPage': page > 1,
        'itemCount': total,
        'pageCount': ((total - 1) // count) + 1,
        'perPage': count,
    }
    return {'results': results, 'count': total, 'pageInfo': page_info}


def get_asset_list_csv(jobs):
    """Build CSV header and rows for the asset-list export.

    Args:
        jobs: iterable of dicts with keys note/target_url/source_ip/
            is_verified/level/is_support_ipv6/username/package and an
            optional datetime under 'create_time'.

    Returns:
        {"csv_header": [...9 column names...], "data_list": [[...], ...]}
    """
    data_list = []
    for job in jobs:
        is_verified = '已认证' if job['is_verified'] else '未认证'

        # create_time may be absent, None, or not a datetime at all —
        # export an empty cell in those cases instead of failing the export
        try:
            create_time = job['create_time'].strftime("%Y-%m-%d")
        except (AttributeError, KeyError, TypeError, ValueError):
            create_time = ''

        data_list.append([job['note'], job['target_url'], job['source_ip'], is_verified,
                          job['level'], job['is_support_ipv6'], job['username'],
                          job['package'], create_time])

    csv_header = [u'资产名称', u'资产地址', u'源站 IP', u'认证状态', u'风险等级', u'IPv6状态', u'所属用户', u'套餐', u'创建时间']
    return {"csv_header": csv_header, "data_list": data_list}
