import arrow
import copy
import bson
import logging
import asyncio
import aioredis
from motor.motor_asyncio import AsyncIOMotorClient
from app.config.settings import OUTPUT_QUEUE, REDIS_URL, MONGODB_SETTINGS
from .common import add_cleanup_task

# Index the configured MongoDB clusters by their "alias" key ("default" when absent).
# NOTE(review): dict.pop mutates the setting dicts inside MONGODB_SETTINGS in place —
# confirm no other module reads the "alias" key after this module is imported.
MONGODB_SETTINGS_MAP = {ms.pop("alias", "default"): ms for ms in MONGODB_SETTINGS}

logger = logging.getLogger(__name__)
# Process-wide caches, keyed by connection URL (see get_redis_pool / get_mongo_client).
_aioredis_pools = {}  # type: Dict[str, Any]
_async_mongo_clients = {}  # type: Dict[str, AsyncIOMotorClient]


def get_mongo_client(mongo_url, is_new=False, loop=None, **kwargs):
    """Return an AsyncIOMotorClient for *mongo_url*.

    Clients are cached per URL in the module-level ``_async_mongo_clients``
    dict; pass ``is_new=True`` to bypass the cache and build a fresh client.
    An explicit *loop* is forwarded as motor's ``io_loop``. Any ``connect``
    kwarg is stripped before construction.

    NOTE(review): the cache key is only the URL — differing kwargs/loop for
    the same URL reuse the first client built; confirm that is acceptable.
    """
    if loop:
        kwargs['io_loop'] = loop
    kwargs.pop('connect', None)
    if is_new:
        return AsyncIOMotorClient(mongo_url, **kwargs)
    cached = _async_mongo_clients.get(mongo_url)
    if cached is None:
        cached = AsyncIOMotorClient(mongo_url, **kwargs)
        _async_mongo_clients[mongo_url] = cached
    return cached


async def get_cm_col(col_name):
    """Return a Motor collection handle for *col_name* on the "default" cluster.

    Reads a shallow copy of the default entry in MONGODB_SETTINGS_MAP, strips
    the connection-level keys, and hands the rest to get_mongo_client.
    """
    conf = copy.copy(MONGODB_SETTINGS_MAP["default"])
    mongo_url = conf.pop("host")
    database = conf.pop("db", "cloudmonitor")
    # NOTE(review): the auth source value is popped and discarded, never
    # forwarded to the client — confirm this is intended.
    conf.pop("authentication_source", "admin")
    client = get_mongo_client(mongo_url=mongo_url, **conf)
    return client[database][col_name].with_options()


def check_api_module(api):
    """Check whether the user's purchased API module is still usable.

    :param api: the "api" sub-document of the user's balance (may be None/{}),
                expected to carry an arrow-parseable expiry under "time".
    :return: the expiry timestamp (truthy float) when the module is valid,
             ``False`` when *api* is empty, has no "time", or has expired.
             Callers rely on truthiness only.
    """
    if not api:
        return False
    api_exp_time = api.get("time")
    if not api_exp_time:
        return False
    # Parse the expiry once (the original parsed it twice: in the comparison
    # and again in the return).
    exp_ts = arrow.get(api_exp_time).datetime.timestamp()
    if exp_ts < arrow.utcnow().timestamp():
        return False
    return exp_ts


def parse_at_id(at):
    """Map every scanner taskId found in asset-task document *at* to its _id.

    Which "...Settings" sub-documents are inspected depends on the document's
    taskGroupType; entries without a truthy "taskId" are skipped. Returns a
    (possibly empty) dict of {taskId: at["_id"]}.
    """
    group_type = at["taskGroupType"]
    if group_type in ("http", "ping", "ipv6"):
        setting_keys = [f"{group_type}Settings"]
    elif group_type == "web_asset":
        setting_keys = ["assetSettings"]
    elif group_type == "host":
        setting_keys = ["hostVulSettings"]
    elif group_type == "web_security":
        setting_keys = [f"{t}Settings"
                        for t in ("vul", "ssl", "content", "securityEvent", "changeCheck")]
    else:
        setting_keys = []
    mapping = {}
    for key in setting_keys:
        scan_task_id = at.get(key, {}).get("taskId")
        if scan_task_id:
            mapping[scan_task_id] = at["_id"]
    return mapping


async def get_task_data_with_ids(task_ids):
    """Build per-user output payloads for the tasks in *task_ids*.

    Joins five collections — tasks, out_put_configs (active only), balances,
    jobs, asset_tasks — and returns a list of dicts shaped like
    ``{"data_list": [task entries...], <output-config fields>}``, one per uid
    that has an active output config, a valid (unexpired) API balance, and a
    matching job record and asset-task group for each task.

    NOTE(review): every query caps results via ``.to_list(length=100)``;
    documents past 100 are silently dropped — confirm the cap is intended.
    """
    task_col = await get_cm_col("tasks")
    at_col = await get_cm_col("asset_tasks")
    job_col = await get_cm_col("jobs")
    output_col = await get_cm_col("out_put_configs")
    balance_col = await get_cm_col("balances")
    tasks = await task_col.find(
        {"_id": {"$in": task_ids}},
        {"taskType": 1, "_id": 1, "uid": 1, "status": 1, "taskSessionId": 1,
         "startTime": 1, "endTime": 1, "name": 1, "jobId": 1, "taskId": 1}
    ).to_list(length=100)

    if not tasks:
        return []
    tmp_tasks, at_query, uids, jids = [], [], set(), set()
    for task in tasks:
        # One $or clause per task: match the asset-task group holding this
        # scanner taskId under its type-specific settings key.
        at_query.append({f"{task['taskType']}Settings.taskId": task["taskId"]})
        uids.add(task["uid"])
        jids.add(task["jobId"])
        tmp_tasks.append({
            "task_type": task["taskType"],     # task type
            "task_status": task["status"],   # task status
            "task_id": task["_id"],          # task ID
            "task_session_id": task["taskSessionId"],  # task execution (session) ID
            "target": "",           # task asset address — filled from the job below
            "target_ip": "",           # task asset source IP — filled from the job below
            "target_id": task["jobId"],        # task asset (job) ID
            "target_name": "",      # task asset name
            "start_time": task["startTime"],       # task start time
            "end_time": task["endTime"],         # task end time
            "task_name": task["name"],
            "task_group_id": "",
            "uid": task["uid"],
            "task_scan_id": task["taskId"]
        })
    # Users without an active output config produce no results at all.
    op_confs = await output_col.find({"uid": {"$in": list(uids)}, "is_active": True}).to_list(length=100)
    if not op_confs:
        return []
    balances = await balance_col.find({"uid": {"$in": list(uids)}}, {"uid": 1, "balance.api": 1}).to_list(length=100)
    balance_map = {b["uid"]: b["balance"].get("api", {}) for b in balances}
    op_conf_map = {}
    for op_conf in op_confs:
        # Skip users whose purchased API module is missing or expired.
        if not check_api_module(balance_map.get(op_conf["uid"])):
            continue
        item = {"url": op_conf.get("url"), "method": op_conf["method"]}
        # Optional auth fields are copied only when present and truthy.
        if auth_type := op_conf.get("auth_type"):
            item["auth_type"] = auth_type
        if add_to := op_conf.get("add_to"):
            item["add_to"] = add_to
        if auth_key := op_conf.get("auth_key"):
            item["auth_key"] = auth_key
        if auth_value := op_conf.get("auth_value"):
            item["auth_value"] = auth_value
        op_conf_map.update({op_conf["uid"]: item})
    # Narrow uids to those that survived the config/balance filtering.
    uids = list(op_conf_map.keys())
    job_list = await job_col.find({"_id": {"$in": list(jids)}, "uid": {"$in": uids}},
                                  {"targetUrl": 1, "sourceIp": 1, "_id": 1}).to_list(length=100)
    job_map = {j["_id"]: j for j in job_list}
    ats = await at_col.find({"uid": {"$in": uids}, "$or": at_query}).to_list(length=100)
    at_map = {}
    for at in ats:
        # scanner taskId -> asset-task group _id
        at_map.update(parse_at_id(at))
    results = {}
    for tt in tmp_tasks:
        uid = tt.pop("uid")
        task_scan_id = tt.pop("task_scan_id")
        op = op_conf_map.get(uid)
        job = job_map.get(tt["target_id"])
        atid = at_map.get(task_scan_id)
        # Drop tasks missing any of: output config, job record, group id.
        if not all([op, job, atid]):
            continue
        tt["target"] = job["targetUrl"]
        tt["target_ip"] = job.get("sourceIp") or ""
        tt["task_group_id"] = atid
        if not results.get(uid):
            results[uid] = {"data_list": [tt], **op}
        else:
            results[uid]["data_list"].append(tt)
    return list(results.values())


async def get_redis_pool(redis_url=None, is_new=False, **kwargs) -> aioredis.RedisConnection:
    """Return an aioredis pool for *redis_url*, cached per URL.

    Pass ``is_new=True`` to build an un-cached, un-tracked pool. The first
    cached pool per URL registers a cleanup task that closes it on shutdown.

    NOTE(review): despite the annotation, create_redis_pool returns a pooled
    Redis interface, not a RedisConnection — confirm the intended type.
    """
    if is_new:
        return await aioredis.create_redis_pool(redis_url, **kwargs)
    redis_pool = _aioredis_pools.get(redis_url)
    if redis_pool is not None:
        return redis_pool
    redis_pool = await aioredis.create_redis_pool(redis_url, **kwargs)
    _aioredis_pools[redis_url] = redis_pool

    async def _redis_pool_cleanup():
        redis_pool.close()
        await redis_pool.wait_closed()

    add_cleanup_task(_redis_pool_cleanup, '{} redis pool cleanup'.format(redis_pool))
    return redis_pool


async def redis_multi_pop(redis_pool, queue, size=None):
    """Atomically pop up to *size* (default 100) items from the head of *queue*.

    Uses a MULTI/EXEC transaction: LRANGE reads the head slice, LTRIM removes
    it, so no other consumer can see the same items.
    """
    batch = size or 100
    txn = redis_pool.multi_exec()
    txn.lrange(queue, 0, batch - 1)
    txn.ltrim(queue, batch, -1)
    popped, _ = await txn.execute()
    return popped


async def yield_result():
    """Async generator: drain task ids from the Redis OUTPUT_QUEUE forever and
    yield per-user result payloads built by get_task_data_with_ids.

    Sleeps briefly when the queue is empty, backs off 3s on Redis errors, and
    re-raises CancelledError so the surrounding task can stop it cleanly.
    """
    logger.info('start new redis consumer')
    mq_pool = await get_redis_pool(REDIS_URL)
    while True:
        try:
            task_id_list = await redis_multi_pop(mq_pool, OUTPUT_QUEUE)
            if not task_id_list:
                await asyncio.sleep(0.3)
                continue
            # Redis returns bytes; decode before building ObjectIds, but also
            # accept ids that already arrive as str.
            task_ids = [bson.ObjectId(i.decode()) if isinstance(i, bytes) else bson.ObjectId(i) for i in task_id_list]
            results = await get_task_data_with_ids(task_ids)
            for result in results:
                yield result
        except KeyboardInterrupt:
            # NOTE(review): KeyboardInterrupt is rarely delivered inside a
            # coroutine — confirm this branch is actually reachable.
            logger.info('got KeyboardInterrupt, exit consumer')
            break
        except aioredis.RedisError as err:
            logger.error('failed to connect to redis: {!r}'.format(err))
            await asyncio.sleep(3)
        except asyncio.CancelledError:
            # Propagate task cancellation instead of swallowing it below.
            raise
        except Exception as err:
            # Best-effort consumer: log anything unexpected and keep running.
            logger.exception(repr(err))
            await asyncio.sleep(0.3)
