import re
import json
import datetime
import arrow
import copy
import logging

from app.libs import scanner
from app.db.models.assets import Asset
from app.db.models.alert_groups import AlertGroup
from app.libs.site_utils import domain_syntax_check
from urllib.parse import urlparse, urljoin
from app.libs.ip import is_ipv4
from app.errors import AddJobError, TaskError
from app.db.models.balances import Balances
from app.db.models.jobs import Job
from app.db.models.users import User
from app.db.models.tasks import Task
from app.db.models.asset_tasks import AssetTasks
from app.libs.oms import Client
from app.libs.utility import get_default_area
from app.libs.enums import (
    TaskType, SPEED_CONCURRENCY_MAP,
    TaskTriggerType, ExTaskType, WS_TASK_GROUP_TUPLE
)

logger = logging.getLogger()


def get_valid_http_settings_by_job(job, balance=None):
    """Return the job's httpSettings trimmed to the remaining httpMaxTask quota.

    Args:
        job: the Job whose httpSettings should be validated.
        balance: optional pre-fetched Balances document; looked up by uid when
            not supplied.

    Returns:
        The (possibly truncated) httpSettings, or [] when there is no balance
        or no settings at all.
    """
    if balance is None:
        balance = Balances.objects.filter(uid=job.uid).first()
    if not balance or not job.httpSettings:
        return []

    # Remaining quota for http tasks; never below zero.
    remaining = balance.balance.httpMaxTask.total - balance.balance.httpMaxTask.used
    remaining = max(remaining, 0)

    # Truncate the settings list when it exceeds the remaining quota.
    if job.httpSettings.count() > remaining:
        return job.httpSettings[:remaining]
    return job.httpSettings


def get_job(job_id, **kwargs):
    """Fetch a Job and refresh its dns_id/site_id from the inner DNS service.

    Args:
        job_id: primary key of the job; falsy values short-circuit to None.
        **kwargs: extra filter criteria applied to the Job query.

    Returns:
        Job | None: the reloaded job, or None when the job/user cannot be
        found or the DNS service does not return both ids.
    """
    if not job_id:
        return None
    job = Job.objects.filter(pk=job_id, **kwargs).first()
    # Fix: the original dereferenced job.uid / user.username without None
    # checks, raising AttributeError whenever the lookup missed.
    if not job:
        return None
    user = User.objects.filter(pk=job.uid).first()
    if not user:
        return None
    params = {
        'dns_id': job.dns_id,
        'site_id': job.site_id,
        'asset': job.targetUrl,
        'username': user.username
    }

    resp = Client().post("/api/inner/dnss/get_dns_id/", json.dumps(params)).get('data', {})
    dns_id = resp.get('dns_id')
    site_id = resp.get('site_id')
    if not dns_id or not site_id:
        return None

    job.update(dns_id=dns_id, site_id=site_id)
    job.reload()

    return job


def get_url_path_params(url):
    """Return the path (plus query string, if any) portion of *url*.

    A scheme is prepended when missing so urlparse treats the leading
    component as the host rather than as the path.

    Fix: the original tested ``url.startswith('http')``, which mis-handled
    hosts that merely begin with "http" (e.g. "httpbin.org/get" came back as
    "httpbin.org/get" instead of "/get"); test for an explicit scheme instead.
    """
    if not url.startswith(('http://', 'https://')):
        url = f"https://{url}"
    parsed = urlparse(url)
    # Empty path normalizes to '/'.
    path = parsed.path or '/'
    if parsed.query:
        path = f"{path}?{parsed.query}"
    return path


def schedule_setting(scheduler, job, setting, task_id='', target='', trigger_type='schedule',
                     is_disabled=None, is_periodic=None, immediate_exec=False, index=None,
                     asset_task=None, **kwargs):
    """Dispatch a scan task (periodic or one-off) through *scheduler*.

    Args:
        scheduler: scheduler class (one of the scanner.*Scheduler classes);
            instantiated inside with the job's uid.
        job (Job): the Job instance the task belongs to.
        setting: settings object that already passed model validation.
        task_id (str, optional): id of an already-scheduled task. Defaults to ''.
        target (str, optional): scan target; falls back to job.targetUrl.
        trigger_type (str, optional): trigger type ('schedule',
            'scheduleIncrement', 'manual', 'special'). Defaults to 'schedule'.
        is_disabled (bool, optional): whether the task is disabled; computed
            automatically when not given.
        is_periodic (bool, optional): whether the task is periodic; computed
            automatically when not given.
        immediate_exec (bool, optional): execute immediately. Defaults to False.
        index (int, optional): index required by availability tasks.
        asset_task (AssetTasks, optional): the task group the task belongs to.

    Returns:
        str: the scheduler's task id.

    Raises:
        TaskError: when the user's plan is expired/free, or dispatching fails.
    """

    target = target if target else job.targetUrl

    # Periodic trigger types derive enable/disable state from the job;
    # other triggers default to an enabled, non-periodic task.
    if trigger_type in ['schedule', 'scheduleIncrement']:
        is_periodic = True if is_periodic is None else is_periodic
        is_disabled = job.is_disabled(setting.enable) if is_disabled is None else is_disabled
    else:
        is_periodic = False if is_periodic is None else is_periodic
        is_disabled = False if is_disabled is None else is_disabled

    # A task group overrides the disabled state computed above.
    if asset_task:
        is_disabled = asset_task.is_disabled(getattr(setting, "enable", True))

    # If the web-vul task enables site-portrait monitoring, stop the
    # periodic task (the portrait trigger takes over).
    if getattr(setting, "enableSitePortraitTriggerMonitor", False) and \
            trigger_type != "manual" and kwargs.get("task_type", "") == "vul":
        is_disabled = True

    # Custom tasks dispatched through site portrait become periodic-increment.
    if getattr(setting, 'enableSitePortraitTriggerMonitor', False):
        trigger_type = 'scheduleIncrement'

    # Task name: prefer setting.name, else the task group's / job's name.
    if getattr(setting, 'name', ''):
        name = setting.name
    else:
        name = asset_task.name if asset_task else job.note

    # Origin-security-check tasks are always immediate one-off manual tasks.
    is_defense = kwargs.get("is_defense", False)
    if is_defense:
        trigger_type = "manual"
        name = "源站安全检测"
        is_disabled = False
        is_periodic = False
        immediate_exec = True

    # Verify the user's plan has not expired (defense tasks and disabled
    # tasks are exempt from this check).
    balance = Balances.objects.filter(uid=job.uid).first()
    is_error_balance = (not balance) or (not balance.planExpireTime) or (balance.planExpireTime < datetime.datetime.utcnow())
    if (not is_defense) and is_error_balance and (not is_disabled):
        raise TaskError(msg='套餐已过期或者是免费套餐，不能下发任务')

    insert = kwargs.get("insert", True)

    # Build the addition payload.
    task_type = scheduler.task_type
    ref_type = 'asset' if task_type == 'asset' else 'job'
    if ref_type == 'job':
        ref_id = str(asset_task.pk) if asset_task else str(job.pk)
    else:
        asset = Asset.objects.get_or_404(pk=job.assetId)
        ref_id = str(asset.pk)

    prelogin = kwargs.get('prelogin', {})

    task_settings = setting.to_dict()
    # Change-check / content tasks carry includeUrl entries made absolute
    # against the job target.
    include_urls = task_settings.get("collect", {}).get("includeUrl", [])
    if task_type in [TaskType.change_check.value, TaskType.content.value] and include_urls:
        urls = [urljoin(job.targetUrl, url).__str__() for url in include_urls]
        task_settings["collect"]["includeUrl"] = urls
    ws_task_time = {}
    # Web-security task groups may carry scheduling-window settings
    # (ws_task_time); merge them into the task settings.
    if asset_task and asset_task.taskGroupType == "web_security":
        ws_task_time = getattr(asset_task, "ws_task_time", {})
        ws_task_time = ws_task_time if isinstance(ws_task_time, dict) else ws_task_time.to_mongo().to_dict()
        task_settings.update(ws_task_time)

    if asset_task and getattr(asset_task, 'scanSpeedConfig', {}):
        task_settings.update({'scanSpeedConfig': asset_task.scanSpeedConfig.to_dict()})

    # Proxy IP only applies to these task types, and only when the job opts in.
    enable_proxy_ip = getattr(job, "enable_proxy_ip", False) and task_type in ['vul', 'ssl', 'securityEvent', 'content']

    addition = {
        'name': name,
        'taskSettings': task_settings,
        'target': target,
        'domain': job.target.hostname,
        'triggerType': trigger_type,
        'sourceIp': job.sourceIp,
        'taskIdx': 0 if index is None else index,
        'jobId': str(job.pk),
        'uid': str(job.uid),
        'taskType': task_type,
        'refType': ref_type,
        'refId': ref_id,
        'prelogin': prelogin.to_dict() if hasattr(prelogin, 'to_dict') else prelogin
    }

    task = None
    # An addition carrying a pk is treated as a retried task: reuse the Task
    # record and reset it to 'waiting'.
    if pk := kwargs.get('addition', {}).get('pk'):
        addition['pk'] = pk
        task = Task.objects.get_or_404(pk=pk)
        addition['startTime'], created, status, progress = arrow.Arrow.fromdatetime(
            task.startTime).for_json(), False, task.status, task.progress
        task.update(status='waiting', progress=0)
    # Custom ('manual') or special tasks create a fresh Task record.
    elif trigger_type in ['manual', 'special']:
        _addition = copy.deepcopy(addition)
        if alert_setting := kwargs.pop('alert_setting', None):
            _addition['alertSettings'] = alert_setting.to_dict()
        task = Task.from_dict(_addition, created=True)
        created, status, progress = True, 'waiting', 0
        task.status = 'waiting'
        task.progress = 0
        # Honor a configured start time; otherwise start now.
        task.startTime = arrow.utcnow().datetime if not ws_task_time.get("startAt") \
            else arrow.get(ws_task_time.get("startAt")).datetime
        task.save()
        addition['pk'] = str(task.pk)
    # IPv6 compliance tasks are not given the origin-server IP.
    origin_ip = '' if task_type == 'ipv6' else job.sourceIp
    try:
        scheduler = scheduler(user_id=str(job.uid)).init_from_setting(
            setting,
            task_id=task_id,
            target_url=target,
            is_disabled=is_disabled,
            is_periodic=is_periodic,
            origin_ip=origin_ip,
            addition=addition,
            immediate_exec=immediate_exec,
            job_id=job.pk.__str__(),
            prelogin=kwargs.get('prelogin', {}),
            custom_monitor_urls=kwargs.get('custom_monitor_urls', {}),
            is_enable_concurrency_limit=kwargs.get('is_enable_concurrency_limit', False),
            task_concurrency=kwargs.get('task_concurrency', 0),
            enable_proxy_ip=enable_proxy_ip
        )
        # Periodic + immediate: force a rescan; only effective for enabled
        # periodic tasks (is_periodic=True and is_disabled=False).
        if immediate_exec and is_periodic and task_id and not is_disabled:
            scheduler.rescan(is_force=True)
        scheduler.auto_schedule(insert=insert)
        if task:
            task.update(taskId=scheduler.task_id)
        if asset_task and task:
            _setting = getattr(asset_task, f"{task.taskType}Settings")
            _setting.taskId = scheduler.task_id
            asset_task.save()

    except Exception as e:
        logger.exception(e)
        # Newly created Task records are removed on failure ...
        if task and created:
            task.delete()
        # ... retried ones are restored to their previous state.
        elif task and not created:
            task.update(status=status, progress=progress)
        raise TaskError(msg='任务下发失败')
    return scheduler.task_id


def remove_scheduler_task(scheduler, task_ids):
    """Remove a batch of scheduled tasks.

    Args:
        scheduler: scheduler class accepting a ``task_ids`` keyword (a set).
        task_ids: iterable of task ids; duplicates are collapsed.
    """
    scheduler(task_ids=set(task_ids)).batch_remove()


def schedule_http(job, setting, immediate_exec: bool = True, **kwargs):
    """Dispatch an HTTP availability task and store the task id on *setting*."""
    asset_task = kwargs.get("asset_task")
    previous_id = asset_task.httpSettings.taskId if asset_task else ""
    setting.taskId = schedule_setting(
        scanner.HttpScheduler,
        job,
        setting,
        target=setting.target,
        immediate_exec=immediate_exec,
        task_id=previous_id,
        # Availability tasks are indexed; new http settings append at the end.
        index=len(job.httpSettings),
        **kwargs
    )


def schedule_ping(job, setting, immediate_exec: bool = True, **kwargs):
    """Dispatch a PING availability task and store the task id on *setting*."""
    asset_task = kwargs.get("asset_task")
    previous_id = asset_task.pingSettings.taskId if asset_task else ""
    setting.taskId = schedule_setting(
        scanner.PingScheduler,
        job,
        setting,
        target=setting.target,
        immediate_exec=immediate_exec,
        task_id=previous_id,
        index=0,
        **kwargs
    )


def schedule_vul(job, settings, immediate_exec: bool = True, **kwargs):
    """Dispatch a web vulnerability scan task and record its task id.

    Non-manual runs pick 'schedule' (full crawl) or 'scheduleIncrement'
    (incremental) based on the collect type.
    """
    if kwargs.get("is_manual"):
        trigger = "manual"
    elif settings.collect.type == 'full':
        trigger = 'schedule'
    else:
        trigger = 'scheduleIncrement'

    asset_task = kwargs.get("asset_task")
    previous_id = (asset_task.vulSettings if asset_task else job.vulSettings).taskId

    settings.taskId = schedule_setting(
        scanner.VulScheduler,
        job,
        settings,
        trigger_type=trigger,
        immediate_exec=immediate_exec,
        is_disabled=False,
        task_id=previous_id,
        **kwargs
    )


def schedule_security_event(job, settings, immediate_exec: bool = True, **kwargs):
    """Dispatch a security-event monitoring task and record its task id."""
    trigger = "manual" if kwargs.get("is_manual") else "schedule"
    asset_task = kwargs.get("asset_task")
    previous_id = (asset_task.securityEventSettings if asset_task else job.securityEventSettings).taskId
    settings.taskId = schedule_setting(
        scanner.SecurityEventScheduler,
        job,
        settings,
        trigger_type=trigger,
        immediate_exec=immediate_exec,
        task_id=previous_id,
        **kwargs
    )


def schedule_content(job, settings, immediate_exec: bool = True, **kwargs):
    """Dispatch a content monitoring task and record its task id."""
    trigger = "manual" if kwargs.get("is_manual") else "schedule"
    asset_task = kwargs.get("asset_task")
    previous_id = (asset_task.contentSettings if asset_task else job.contentSettings).taskId
    settings.taskId = schedule_setting(
        scanner.ContentScheduler,
        job,
        settings,
        trigger_type=trigger,
        immediate_exec=immediate_exec,
        task_id=previous_id,
        **kwargs
    )


def schedule_ssl(job, settings, immediate_exec: bool = True, **kwargs):
    """Dispatch an SSL certificate monitoring task and record its task id."""
    trigger = "manual" if kwargs.get("is_manual") else "schedule"
    asset_task = kwargs.get("asset_task")
    previous_id = (asset_task.sslSettings if asset_task else job.sslSettings).taskId
    settings.taskId = schedule_setting(
        scanner.SslScheduler,
        job,
        settings,
        trigger_type=trigger,
        immediate_exec=immediate_exec,
        task_id=previous_id,
        **kwargs
    )


def schedule_change_check(job, settings, immediate_exec: bool = True, **kwargs):
    """Dispatch a page change-check task (always a periodic trigger)."""
    asset_task = kwargs.get("asset_task")
    previous_id = (asset_task.changeCheckSettings if asset_task else job.changeCheckSettings).taskId
    settings.taskId = schedule_setting(
        scanner.ChangeCheckScheduler,
        job,
        settings,
        trigger_type="schedule",
        immediate_exec=immediate_exec,
        task_id=previous_id,
        **kwargs
    )


def schedule_host_vul(job, asset_task, settings, immediate_exec: bool = True, **kwargs):
    """Dispatch a host vulnerability task for the given task group."""
    settings.taskId = schedule_setting(
        scanner.HostVulScheduler,
        job,
        settings,
        immediate_exec=immediate_exec,
        task_id=asset_task.hostVulSettings.taskId,
        asset_task=asset_task,
        **kwargs
    )


def schedule_asset(job, asset, settings, immediate_exec: bool = True, **kwargs):
    """Dispatch an asset-discovery task and record its task id."""
    trigger = "manual" if kwargs.get("is_manual") else "schedule"
    asset_task = kwargs.get("asset_task")
    previous_id = (asset_task.assetSettings if asset_task else asset.taskSettings).taskId
    settings.taskId = schedule_setting(
        scanner.AssetScheduler,
        job,
        settings,
        trigger_type=trigger,
        immediate_exec=immediate_exec,
        task_id=previous_id,
        **kwargs
    )


def check_target_url(target_url: str):
    """Validate a target URL and split it into its components.

    Args:
        target_url: the asset address, expected to start with http:// or https://.

    Returns:
        tuple: (host, hostname, normalized target_url, ipType, protocol, port,
        domain) where ipType is 4 for IPv4 hosts and 0 for domains, and domain
        is None for IP targets.

    Raises:
        AddJobError: on an empty address, bad protocol, invalid IP or domain.
    """
    if not target_url:
        raise AddJobError(msg='资产地址不能为空')

    # Protocol must be http or https; any parsing failure maps to the same error.
    try:
        protocol = target_url.split(':')[0] + ':'
        if protocol not in ('https:', 'http:'):
            raise AddJobError(msg='资产地址协议错误')
    except Exception:
        raise AddJobError(msg='资产地址协议错误')

    # Validate the address: IPv4 first, then domain syntax.
    ip_matches = re.findall(r"\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b", target_url)
    if ip_matches:
        candidate = ip_matches[0]
        _is_ipv4, reason = is_ipv4(candidate)
        if not _is_ipv4:
            raise AddJobError(msg=reason)
        domain, hostname, ipType = None, candidate, 4
    else:
        domain, hostname = domain_syntax_check(target_url)
        if not (domain and hostname):
            raise AddJobError(msg='资产地址错误')
        ipType = 0

    # Extract an explicit port, if any, and rebuild a normalized target URL.
    port = urlparse(target_url).port
    host = f'{hostname}:{port}' if port else hostname
    target_url = f'{protocol}//{host}'

    # Default port follows the protocol when none was given.
    if not port:
        port = 443 if protocol == 'https:' else 80

    return host, hostname, target_url, ipType, protocol, port, domain


def create_job(uid: str, target_url: str, note: str, source_ip: str, enable_monitor: bool = False,
               enable_alert: bool = False, import_type='', defense_target_id=None):
    """Create a Job and its companion Asset document.

    Args:
        uid (str): user id.
        target_url (str): asset address.
        note (str): asset name.
        source_ip (str): origin-server IP.
        enable_monitor (bool, optional): enable monitoring. Defaults to False.
        enable_alert (bool, optional): enable alerting. Defaults to False.
        import_type (str, optional): import type. Defaults to ''.
        defense_target_id (ObjectId, optional): origin-security-check record id.

    Returns:
        tuple: (job, asset) after both are saved and cross-linked.

    Raises:
        AddJobError: when the target URL is invalid or no default alert group
            is configured for the user.
    """

    host, hostname, target_url, ipType, protocol, port, domain = check_target_url(target_url)
    user = User.objects.get_or_404(pk=uid)
    if not (alert_group_default := AlertGroup.objects.filter(uid=uid, isDefault=True).first()):
        raise AddJobError(msg='未设置告警组')

    area_dict = get_default_area(target_url, source_ip)
    area = area_dict['area']
    area_ipv6 = area_dict['area_ipv6']

    # assets
    asset = Asset()
    asset.taskSettings['target'] = target_url
    asset.alertSettings['alertGroupIds'] = [alert_group_default.id.__str__()]
    asset.uid = uid
    asset.target = target_url

    # job
    job = Job()
    job.enableAlert = enable_alert
    job.enableMonitor = enable_monitor
    job.note = note
    job.alertSettings.alertGroupIds.append(alert_group_default.id.__str__())
    job.uid = uid
    job.targetUrl = target_url
    job.target.protocol = protocol
    job.target.host = host
    job.target.hostname = hostname
    job.target.port = port
    job.target.ipType = ipType
    job.target.eTLD1 = domain
    job.sourceIp = source_ip
    job.httpSettings[0].target = target_url
    job.httpSettings[0].sourceIp = source_ip
    job.httpSettings[0].collect.area = area
    job.httpSettings[0].collect.area_ipv6 = area_ipv6
    job.pingSettings[0].target = target_url
    job.pingSettings[0].sourceIp = source_ip
    job.pingSettings[0].collect.area = area
    job.pingSettings[0].collect.area_ipv6 = area_ipv6
    job.import_type = import_type
    # Cloud-defense asset: best-effort check whether the domain is already
    # verified on the defense platform.
    try:
        oms_client = Client()
        resp = oms_client.get('/api/inner/user_domain_valid/', params={
            'username': user.username,
            'domain': target_url})

        if resp.get('msg', '') == 'success':
            job.verification.isVerified = True
            job.cydEnabled = True

    except Exception as e:
        # Fix: log instead of silently swallowing the error — the check
        # remains best-effort, but failures are now visible.
        logger.warning("user_domain_valid check failed for %s: %s", target_url, e)

    if defense_target_id:
        job.defense_target_id = defense_target_id
        job.verification.isVerified = True
        job.cydEnabled = True

    # http assets have ssl monitoring disabled by default
    if job.target.protocol == 'http:':
        job.sslSettings.enable = False

    if job.verification.isVerified is True and not defense_target_id:
        job.httpSettings = get_valid_http_settings_by_job(job)

    asset.validate()
    job.validate()
    asset.save()
    job.assetId = asset.pk
    job.save()

    Asset.objects(pk=asset.pk).update_one(jobId=job.pk)
    Balances.objects(uid=uid).update_one(inc__balance__domain__used=1)
    return job, asset


def get_prelogin_page(asset_task, form):
    """Resolve custom monitor URLs, prelogin config and alert settings.

    Prefers the asset_task's stored values; falls back to the submitted form.

    Returns:
        tuple: (custom_urls_data, prelogin, alert_setting).
    """
    if asset_task:
        raw_pages = asset_task.customMonitorPage.to_dict()
        prelogin = asset_task.prelogin.to_dict()
        alert_setting = asset_task.alertSettings
    else:
        raw_pages = form.customMonitorPage.data
        prelogin = form.prelogin
        alert_setting = form.alert_setting
    return handle_custom_urls_data(raw_pages), prelogin, alert_setting


def enable_monitor(job, asset, immediate_exec=False):
    """Dispatch the default monitoring tasks for a job whose monitoring was
    just enabled: ping, http, security-event, ssl (https only), asset and vul.

    Args:
        job (Job): the job to monitor.
        asset (Asset): the job's companion asset document.
        immediate_exec (bool, optional): execute tasks right away. Defaults to False.
    """
    # NOTE(review): passes the full pingSettings/httpSettings lists while the
    # schedule_* helpers read setting.target — confirm list vs single setting.
    schedule_ping(job, job.pingSettings, immediate_exec=immediate_exec)
    schedule_http(job, job.httpSettings, immediate_exec=immediate_exec)
    schedule_security_event(job, job.securityEventSettings, immediate_exec=immediate_exec)
    # ssl monitoring only applies to https targets
    if job.targetUrl.startswith('https'):
        schedule_ssl(job, job.sslSettings, immediate_exec=immediate_exec)
    schedule_asset(job, asset, asset.taskSettings, immediate_exec=immediate_exec)
    schedule_vul(job, job.vulSettings, immediate_exec=immediate_exec)


def enable_web_sec_task(asset_task, job=None, immediate_exec=False, **kwargs):
    """Dispatch the web-security task group: change-check, vul, security-event,
    content and (for https targets) ssl tasks.

    Manual triggers only dispatch sub-tasks whose settings are enabled and
    run immediately unless a start time is configured; non-manual triggers
    dispatch every sub-task and afterwards copy the group's settings back
    onto the job.
    """
    job = Job.objects.filter(id=asset_task.jobId).first() if not job else job
    prelogin = asset_task.prelogin.to_dict()
    custom_urls_data = handle_custom_urls_data(asset_task.customMonitorPage.to_dict())
    is_enable_concurrency_limit, task_concurrency = parse_speed_config(asset_task)
    is_manual = False if asset_task.triggerType != TaskTriggerType.manual.value else True
    # Manual runs execute immediately unless a start time was configured.
    immediate_exec = True if (is_manual and not asset_task.ws_task_time.startAt) else immediate_exec
    # Change-check is only dispatched when enabled, and always as non-manual.
    if asset_task.changeCheckSettings.enable:
        schedule_change_check(
            job, asset_task.changeCheckSettings,
            immediate_exec=immediate_exec,
            addition={},
            prelogin=prelogin,
            custom_monitor_urls=custom_urls_data,
            asset_task=asset_task,
            is_manual=False,
            alert_setting=asset_task.alertSettings,
            is_enable_concurrency_limit=is_enable_concurrency_limit,
            task_concurrency=task_concurrency,
            **kwargs
        )
    if (is_manual and asset_task.vulSettings.enable) or not is_manual:
        schedule_vul(
            job, asset_task.vulSettings,
            immediate_exec=immediate_exec,
            addition={},
            prelogin=prelogin,
            custom_monitor_urls=custom_urls_data,
            asset_task=asset_task,
            is_manual=is_manual,
            alert_setting=asset_task.alertSettings,
            is_enable_concurrency_limit=is_enable_concurrency_limit,
            task_concurrency=task_concurrency,
            **kwargs
        )
    if (is_manual and asset_task.securityEventSettings.enable) or not is_manual:
        schedule_security_event(
            job, asset_task.securityEventSettings,
            immediate_exec=immediate_exec,
            addition={},
            custom_monitor_urls=custom_urls_data,
            prelogin=prelogin,
            asset_task=asset_task,
            is_manual=is_manual,
            alert_setting=asset_task.alertSettings,
            is_enable_concurrency_limit=is_enable_concurrency_limit,
            task_concurrency=task_concurrency,
            **kwargs
        )
    if (is_manual and asset_task.contentSettings.enable) or not is_manual:
        schedule_content(
            job,
            asset_task.contentSettings,
            immediate_exec=immediate_exec,
            addition={},
            prelogin=prelogin,
            custom_monitor_urls=custom_urls_data,
            asset_task=asset_task,
            is_manual=is_manual,
            alert_setting=asset_task.alertSettings,
            is_enable_concurrency_limit=is_enable_concurrency_limit,
            task_concurrency=task_concurrency,
            **kwargs
        )
    # ssl monitoring only applies to https targets.
    if job.targetUrl.startswith("https") and ((is_manual and asset_task.sslSettings.enable) or not is_manual):
        schedule_ssl(
            job,
            asset_task.sslSettings,
            immediate_exec=immediate_exec,
            addition={},
            custom_monitor_urls=custom_urls_data,
            asset_task=asset_task,
            is_manual=is_manual,
            alert_setting=asset_task.alertSettings,
            **kwargs
        )
    asset_task.save()
    asset_task.reload()
    # Non-manual dispatch syncs the group's settings back onto the job so the
    # job reflects the currently scheduled configuration.
    if not is_manual:
        job.vulSettings = asset_task.vulSettings
        job.sslSettings = asset_task.sslSettings
        job.securityEventSettings = asset_task.securityEventSettings
        job.contentSettings = asset_task.contentSettings
        job.changeCheckSettings = asset_task.changeCheckSettings
    job.save()


def enable_update_change_pages(asset_task, job=None):
    """Dispatch a one-off baseline (sample) update for the change-check task.

    Builds an UpdateChangeCheckBaseScheduler from the group's changeCheck
    settings and runs it immediately without creating a periodic engine task.
    """
    job = Job.objects.filter(id=asset_task.jobId).first() if not job else job
    prelogin = asset_task.prelogin.to_dict()
    task_settings = asset_task.changeCheckSettings.to_dict()
    include_urls = task_settings.get("collect", {}).get("includeUrl", [])
    # Make includeUrl entries absolute against the job's target.
    if include_urls:
        urls = [urljoin(job.targetUrl, url).__str__() for url in include_urls]
        task_settings["collect"]["includeUrl"] = urls
    is_enable_concurrency_limit, task_concurrency = parse_speed_config(asset_task)
    scheduler = scanner.UpdateChangeCheckBaseScheduler(user_id=str(job.uid)).init_from_setting(
        setting=asset_task.changeCheckSettings,
        target_url=job.targetUrl,
        is_disabled=False,
        is_periodic=False,
        origin_ip=job.sourceIp,
        addition={
            'name': f"{job.note}样本更新",
            'taskSettings': task_settings,
            'target': job.targetUrl,
            'domain': job.target.hostname,
            'triggerType': TaskTriggerType.manual.value,
            'sourceIp': job.sourceIp,
            'taskIdx': 0,
            'jobId': str(job.pk),
            'uid': str(job.uid),
            'taskType': ExTaskType.change_check_config.value,
            'refType': "job",
            'refId': "",
            'prelogin': prelogin
        },
        immediate_exec=True,
        job_id=job.pk.__str__(),
        prelogin=prelogin,
        custom_monitor_urls=handle_custom_urls_data(asset_task.customMonitorPage.to_dict()),
        is_enable_concurrency_limit=is_enable_concurrency_limit,
        task_concurrency=task_concurrency,
        enable_proxy_ip=job.enable_proxy_ip,
        is_update_baseline=True
    )
    # insert=False: only run the update, never create a new periodic task.
    scheduler.auto_schedule(insert=False)


def enable_available_task(asset_task, job=None, immediate_exec=False, **kwargs):
    """Dispatch an availability (http or ping) task group.

    Manual triggers always execute immediately.
    """
    if not job:
        job = Job.objects.filter(id=asset_task.jobId).first()
    if asset_task.triggerType == "manual":
        immediate_exec = True

    is_http = asset_task.taskGroupType == "http"
    dispatch = schedule_http if is_http else schedule_ping
    settings = asset_task.httpSettings if is_http else asset_task.pingSettings
    dispatch(
        job, settings, immediate_exec,
        asset_task=asset_task,
        alert_setting=asset_task.alertSettings,
        **kwargs
    )
    asset_task.save()


def enable_task_group(asset_task, insert=False, immediate_exec=False):
    """Dispatch the scheduler task(s) matching the asset_task's group type.

    Args:
        asset_task: the task group to dispatch.
        insert: when modifying a task config, whether to create the task in
            the engine if it does not exist there (True = create, False =
            modify only).
        immediate_exec: execute tasks right away.
    """
    asset_task.reload()
    job = Job.objects.filter(id=asset_task.jobId).first()
    group = asset_task.taskGroupType
    if group == "web_asset":
        asset = job.get_asset()
        schedule_asset(job, asset, asset_task.assetSettings,
                       immediate_exec=immediate_exec, asset_task=asset_task, insert=insert)
        asset_task.save()
        asset_task.reload()
        # Sync the (possibly updated) settings back onto the asset document.
        asset.taskSettings = asset_task.assetSettings
        asset.save()
    elif group in ("http", "ping"):
        enable_available_task(asset_task, job, insert=insert, immediate_exec=immediate_exec)
    elif group == "host":
        schedule_host_vul(job, asset_task, asset_task.hostVulSettings,
                          insert=insert, immediate_exec=immediate_exec)
        asset_task.save()
    elif group == "web_security":
        enable_web_sec_task(asset_task, job, insert=insert, immediate_exec=immediate_exec)


def is_not_none(o):
    """Predicate for ``filter``: True for every value except None.

    :param o: any object.
    :return: bool — whether *o* is not None.
    """
    return o is not None


def handle_custom_urls_data(data):
    """Normalize custom monitor URL data.

    Keeps only the includeUrl/excludeUrl lists, strips whitespace from each
    entry and removes duplicates.

    Fix: the original deduplicated with ``set``, which made the output list
    order nondeterministic; ``dict.fromkeys`` deduplicates while preserving
    first-occurrence order.

    Args:
        data: mapping that may contain 'includeUrl' and 'excludeUrl' lists.

    Returns:
        dict: {'includeUrl': [...], 'excludeUrl': [...]} with unique,
        stripped entries.
    """
    result = {}
    for key in ("includeUrl", "excludeUrl"):
        stripped = [v.strip() for v in data.get(key, [])]
        result[key] = list(dict.fromkeys(stripped))
    return result


def parse_speed_config(asset_task):
    """Derive the concurrency limit from a task group's scanSpeedConfig.

    When a limit applies, the configured concurrency is split evenly across
    the enabled web-security sub-tasks (ssl excluded), with a minimum of 1.

    Returns:
        tuple: (is_enable_concurrency_limit, task_concurrency).
    """
    config = asset_task.scanSpeedConfig.to_dict()
    scan_speed = config.get("scanSpeed", 0)
    concurrency = config.get("concurrency", 0)

    is_enable_concurrency_limit = False
    task_concurrency = 0
    if scan_speed == 0 and concurrency == 0:
        pass  # nothing configured — no limit
    elif scan_speed == 0 and concurrency == SPEED_CONCURRENCY_MAP.get(scan_speed, 0):
        pass  # concurrency matches the default for this speed — treat as unlimited
    elif scan_speed != 0 or concurrency:
        is_enable_concurrency_limit = True
        task_concurrency = concurrency or SPEED_CONCURRENCY_MAP.get(scan_speed, 0)

    if is_enable_concurrency_limit:
        enabled_count = sum(
            1 for t in WS_TASK_GROUP_TUPLE
            if t != TaskType.ssl.value
            and getattr(getattr(asset_task, f"{t}Settings", None), "enable", None)
        )
        if enabled_count > 0:
            task_concurrency = round(task_concurrency / enabled_count) or 1
    return is_enable_concurrency_limit, task_concurrency


def check_has_number(task):
    """Return True when the quota item still has capacity (used < total)."""
    return task.used < task.total


def check_job_balance(balance):
    """Return True when every checked quota (currently only domainEnable)
    still has remaining capacity."""
    return all(check_has_number(task) for task in (balance.domainEnable,))


def check_job_settings_interval(balance, job, asset):
    """Clamp each enabled setting's interval up to the plan's minimum.

    A plan's *MaxFreq.total is the smallest allowed interval; any enabled
    setting configured below it is raised to that floor.
    """
    def _clamp(settings, max_freq):
        # Only enabled settings are adjusted; disabled ones keep their value.
        if settings.enable and settings.interval < max_freq.total:
            settings.interval = max_freq.total

    _clamp(job.vulSettings, balance.vulMaxFreq)
    _clamp(job.securityEventSettings, balance.securityEventMaxFreq)
    _clamp(job.contentSettings, balance.contentMaxFreq)
    for setting in job.httpSettings:
        _clamp(setting, balance.httpMaxFreq)
    for setting in job.pingSettings:
        _clamp(setting, balance.pingMaxFreq)
    _clamp(job.sslSettings, balance.sslMaxFreq)
    _clamp(asset.taskSettings, balance.assetMaxFreq)


def check_http_max_task_enable_used(http_settings, switch):
    """Quota delta for enabled http settings: +1 each when *switch* is truthy,
    -1 each otherwise."""
    delta = 1 if switch else -1
    return sum(delta for item in http_settings if item.enable)


def check_ping_max_task_enable_used(ping_settings, switch):
    """Quota delta for enabled ping settings: +1 each when *switch* is truthy,
    -1 each otherwise."""
    delta = 1 if switch else -1
    return sum(delta for item in ping_settings if item.enable)


def check_site_port_trait_used(vul_settings, switch):
    """Quota delta for the site-portrait feature: +1/-1 when it is enabled on
    *vul_settings*, 0 otherwise."""
    if not vul_settings.enableSitePortraitTriggerMonitor:
        return 0
    return 1 if switch else -1


def check_site_domain_enable_used(switch):
    """Quota delta for the domain-enable counter: +1 when *switch* is truthy,
    -1 otherwise."""
    return 1 if switch else -1


def get_active_user_info(user_id_list):
    """Collect plan/balance bookkeeping for identified users that have a plan.

    Returns:
        tuple: (package_ids, user_ids, user_maps, user_package_maps,
        balance_ids, user_balance_maps) — empty containers when no user
        matches the filter.
    """
    user_ids = []
    package_ids = set()
    balance_ids = set()
    user_maps = dict()
    user_package_maps = dict()
    user_balance_maps = dict()

    users = User.objects.find({"_id": {"$in": user_id_list}, "identified": True, "planId": {"$exists": True}})

    if users.count() == 0:
        return list(package_ids), user_ids, user_maps, user_package_maps, list(balance_ids), user_balance_maps

    for u in users:
        uid = u.id
        user_ids.append(uid)
        user_maps[uid] = u
        package_ids.add(u.planId)
        user_package_maps[uid] = u.planId
        balance_ids.add(u.balanceId)
        user_balance_maps[uid] = u.balanceId

    return list(package_ids), user_ids, user_maps, user_package_maps, list(balance_ids), user_balance_maps


def get_verified_job_list(user_ids, job_ids, enable_alert=None, enable_monitor=None):
    """Query verified jobs for the given users, optionally filtered further.

    Args:
        user_ids: list of user ids to match.
        job_ids: optional list of job ids to restrict the result to.
        enable_alert: when not None, filter on enableAlert.
        enable_monitor: when not None, filter on enableMonitor.

    Returns:
        The job queryset, or [] when nothing matched.
    """
    query_dict = {"uid": {"$in": user_ids}, "verification.isVerified": True}

    if enable_alert is not None:
        query_dict['enableAlert'] = enable_alert
    if enable_monitor is not None:
        query_dict['enableMonitor'] = enable_monitor

    # Fix: both branches of the original `if job_ids` conditional executed the
    # identical query call; only the query-dict update is conditional.
    if job_ids:
        query_dict["_id"] = {"$in": job_ids}
    jobs = Job.objects.find(query_dict)

    if jobs.count() == 0:
        return []

    return jobs


def get_balance_maps(balance_ids, switch):
    """Map balance id -> Balances document.

    Args:
        balance_ids: ids to look up.
        switch: when truthy, only balances whose plan has not yet expired.
    """
    query_dict = {"_id": {"$in": balance_ids}}
    if switch:
        query_dict['planExpireTime'] = {'$gt': arrow.utcnow().datetime}
    return {balance.id: balance for balance in Balances.objects.find(query_dict)}


def get_asset_maps(asset_ids):
    """Map asset id -> Asset document for the given ids."""
    return {asset.id: asset for asset in Asset.objects.filter(pk__in=asset_ids)}


def calculate_score(scores):
    """Compute a risk score from a list of severity levels.

    Severity encoding: 5=critical, 4=high, 3=medium, 2=low.
    The highest severity present sets the base, plus its count:
    critical -> 75 + n, high -> 50 + n, medium -> 25 + n, low -> n.
    The result is capped at 100; no recognized severities yields 0.
    """
    counts = {5: 0, 4: 0, 3: 0, 2: 0}
    for level in scores:
        if level in counts:
            counts[level] += 1

    if counts[5]:
        score = 75 + counts[5]
    elif counts[4]:
        score = 50 + counts[4]
    elif counts[3]:
        score = 25 + counts[3]
    elif counts[2]:
        score = counts[2]
    else:
        score = 0

    return min(score, 100)  # cap at 100


def get_used_time(diff_time, is_str=False):
    """Break a duration in seconds into days/hours/minutes/seconds.

    Fixes the misspelled ``oen_hour`` local and replaces the manual
    remainder arithmetic with ``divmod``.

    :param diff_time: elapsed time in seconds
    :param is_str: when True, return a human-readable string
                   (zero-valued components are omitted, so 0 -> "")
    :return: ``[days, hours, minutes, seconds]`` or the formatted string
    """
    days, rem = divmod(diff_time, 24 * 60 * 60)
    hours, rem = divmod(rem, 60 * 60)
    minutes, seconds = divmod(rem, 60)
    time_list = [days, hours, minutes, seconds]
    if not is_str:
        return time_list
    time_ch = ["天", "小时", "分钟", "秒"]
    return "".join(
        f"{value}{unit}" for value, unit in zip(time_list, time_ch) if value
    )


def page_info(page, count, total):
    """Build a pagination-metadata dict.

    Replaces the ``True if X else False`` anti-idiom with the boolean
    expressions themselves.

    :param page: 1-based current page number
    :param count: items per page (must be > 0)
    :param total: total item count
    :return: dict with current/next/previous page info and counts
    """
    return {
        'currentPage': page,
        'hasNextPage': page * count < total,
        'hasPreviousPage': page > 1,
        'itemCount': total,
        # ceiling division; total == 0 still yields at least "page 0 + 1"
        'pageCount': ((total - 1) // count) + 1,
        'perPage': count
    }


def singleton(cls):
    """Singleton class decorator.

    The first call constructs the instance and caches it on the class as
    the ``instance`` attribute; every later call returns that same object.

    usage:
        @singleton
        class A(object):
            pass
    """
    def wrapper(*args, **kwargs):
        if not hasattr(cls, "instance"):
            cls.instance = cls(*args, **kwargs)
        return cls.instance
    return wrapper


def utc_now_str() -> str:
    """Return the current UTC time as an ISO-8601 string.

    The original built a local-time Arrow via ``arrow.now()`` only to
    discard it by calling the ``utcnow`` classmethod on the instance;
    ``arrow.utcnow()`` is the direct equivalent.
    """
    return arrow.utcnow().isoformat()


def utc_now() -> datetime.datetime:
    """Return the current UTC time as a timezone-aware datetime.

    ``arrow.now().utcnow()`` created a throwaway local-time Arrow before
    invoking the ``utcnow`` classmethod; call it directly instead.
    """
    return arrow.utcnow().datetime


def datetime_from_ts(timestamp: int) -> datetime.datetime:
    """Convert a Unix timestamp (seconds) to a timezone-aware UTC datetime."""
    parsed = arrow.get(timestamp)
    return parsed.datetime


def parse_multi_dict(_dict, keys=(), default=None):
    """Walk a nested dict/sequence following *keys* and return the value.

    Fixes: the return annotation ``dict or type`` evaluated to plain
    ``dict`` and was misleading, so it is dropped; the unused
    ``except ... as e`` binding is removed.

    Behavioral quirks preserved from the original: a falsy value is
    rejected (returns *default*) only after the FIRST key, not after
    later keys; an empty *keys* returns *_dict* unchanged.

    :param _dict: target mapping (or nested mapping/sequence)
    :param keys: key path, outermost first
    :param default: value returned on any miss; ``None`` means ``{}``
    :return: the resolved value, or *default*
    """
    if default is None:
        default = {}
    if not _dict:
        return default
    if not keys:
        return _dict
    try:
        data = _dict[keys[0]]
    except (IndexError, KeyError):
        return default
    if not data:
        return default
    for key in keys[1:]:
        try:
            data = data[key]
        except (IndexError, KeyError):
            return default
    return data


def performance_test(exit_flag=True):
    """Decorator factory that line-profiles the wrapped function.

    Requires the third-party ``line_profiler`` package (imported lazily
    at call time). Stats are printed after each call.

    :param exit_flag: when True, terminate the process after printing
                      the profile (one-shot profiling)
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            from line_profiler import LineProfiler
            profiler = LineProfiler()
            profiled = profiler(func)
            result = profiled(*args, **kwargs)
            profiler.print_stats()
            if exit_flag:
                exit()
            return result
        return wrapper
    return decorator


def balance_task_count_sync(uid, balance):
    """Recompute a user's balance "used" counters from live collections.

    Aggregates counts over Job and AssetTasks documents for *uid* and
    writes them into the matching ``balance.<field>.used`` paths of the
    user's Balances document via a raw ``update_one``.

    :param uid: owner user id used in every ``$match`` stage
    :param balance: the user's Balances document (read only, to inspect
                    ``charges``); the DB row is updated directly
    """

    def deep_getattr(c, attrs):
        # Follow an attribute path, returning None on the first miss.
        for attr in attrs:
            c = getattr(c, attr, None)
        return c

    # Each key is the attribute path inside the balance document; each
    # value is the Mongo aggregation pipeline that yields the current
    # "used" count for that counter, computed over the jobs collection.
    balance_rule = {
        ('balance', 'domain'): [{'$match': {'uid': uid, '$or': [{'assetType': 'web'},
                                                                {'assetType': {'$exists': False}}]}},
                                {'$group': {'_id': 'count', 'count': {'$sum': 1}}}],
        ('balance', 'host'): [{'$match': {'uid': uid, 'assetType': 'host'}},
                              {'$group': {'_id': 'count', 'count': {'$sum': 1}}}],
        ('balance', 'sitePortrait'): [{'$match': {'uid': uid, 'enableMonitor': True, 'verification.isVerified': True,
                                                  'vulSettings.enable': True,
                                                  'vulSettings.enableSitePortraitTriggerMonitor': True}},
                                      {'$group': {'_id': 'enableSitePortraitTriggerMonitor', 'count': {'$sum': 1}}}]
    }
    # Availability is computed from asset_tasks; the httpSettings/
    # pingSettings embedded in jobs are no longer used for this.
    balance_asset_task_rule = {
        # domainEnable currently has no practical meaning
        ('balance', 'domainEnable'): [{'$match': {'uid': uid, 'enableMonitor': True, 'taskGroupType': 'web_security', 'triggerType': 'schedule'}},
                                      {'$group': {'_id': 'count', 'count': {'$sum': 1}}}],
        ('balance', 'httpMaxTask'): [{'$match': {'uid': uid, 'taskGroupType': 'http', 'triggerType': 'schedule'}},
                                     {'$group': {'_id': 'http', 'count': {'$sum': 1}}}],
        ('balance', 'pingMaxTask'): [{'$match': {'uid': uid, 'taskGroupType': 'ping', 'triggerType': 'schedule'}},
                                     {'$group': {'_id': 'ping', 'count': {'$sum': 1}}}],
        ('balance', 'httpMaxTaskEnable'): [
            {'$match': {'uid': uid, 'enableMonitor': True, 'taskGroupType': 'http', 'triggerType': 'schedule'}},
            {'$group': {'_id': 'httpEnable', 'count': {'$sum': 1}}}],
        ('balance', 'pingMaxTaskEnable'): [
            {'$match': {'uid': uid, 'enableMonitor': True, 'taskGroupType': 'ping', 'triggerType': 'schedule'}},
            {'$group': {'_id': 'pingEnable', 'count': {'$sum': 1}}}],
    }
    # actions accumulates dotted-path -> count pairs for a single $set.
    actions = {}
    for key, aggregate in balance_rule.items():
        jobs = list(Job.objects.aggregate(aggregate))
        # Pipeline groups everything into one document; no match -> 0.
        count = jobs[0].get('count', 0) if jobs else 0
        if not isinstance(count, (int, float)):
            continue

        actions['.'.join([*key, 'used'])] = count

        # TODO: should be decided per top-up record; logic should follow
        # the web-side (node) implementation. For now only the first
        # charge entry's "used" is overwritten when charges exist.
        charges = deep_getattr(balance, [*key, 'charges'])
        if charges:
            actions['.'.join([*key, 'charges', '0', 'used'])] = count

    for key, aggregate in balance_asset_task_rule.items():
        asset_tasks = list(AssetTasks.objects.aggregate(aggregate))
        count = asset_tasks[0].get('count', 0) if asset_tasks else 0
        if not isinstance(count, (int, float)):
            continue

        actions['.'.join([*key, 'used'])] = count

    if actions:
        # Raw pymongo update (bypasses the ODM) so dotted paths can be
        # $set directly on the user's balance document.
        getattr(Balances, '_get_collection')().update_one({'uid': uid}, {'$set': actions})
