import asyncio
import arrow
import json

from pymongo import MongoClient, UpdateOne

from conf.settings import CLOUDMONITOR_MONGODB, CLOUDMONITOR_DB
from utility.monitor import Monitor
from utility.utils import backup_warp
from utility.log import logger
from utility.thread_pool import Pool


# Node migration pair: every occurrence of ORIGIN_NODE in a job's
# collect areas is replaced with NEW_NODE by main().
ORIGIN_NODE = 'hk-hgc'
NEW_NODE = 'tx-xian'

# Swap the two assignments below to migrate in the opposite direction.
# NEW_NODE = 'hk-hgc'
# ORIGIN_NODE = 'tx-xian'

# Shared MongoDB handle for the cloud-monitor database.
db = MongoClient(CLOUDMONITOR_MONGODB)[CLOUDMONITOR_DB]
# db = MongoClient(host='localhost', port=27017)['cloudmonitor']
# HTTP client for the monitor service API (used by send/stop_tasks).
monitor = Monitor()

# Job categories whose settings arrays ({type}Settings) are rewritten.
task_type_tuple = ('http', 'ping')
# Single timestamp captured at import time; used both for the backup
# decorator and for the plan-expiry comparison in check_and_stop_task.
arrow_now = arrow.utcnow()


def send(task_type, data):
    """POST the updated job settings to the scheduler endpoint for *task_type*."""
    endpoint = f'/api/v2/jobs/{task_type}/schedule'
    return monitor.post(endpoint, data)


async def stop_tasks(job_id, update_list: list):
    """Ask the monitor service to stop a job's tasks and, on success,
    apply the pending Mongo updates (e.g. flipping enableMonitor off).

    :param job_id: the job's Mongo ``_id`` (stringified for the API call).
    :param update_list: pymongo write operations to apply to ``jobs`` when
        the stop request succeeds.
    """
    request_args = {
        "jobId": str(job_id),
        # Empty taskIds — presumably means "stop all tasks of this job";
        # TODO(review): confirm against the /api/v2/jobs/tasks/stop API.
        "taskIds": []
    }

    loop = asyncio.get_event_loop()
    # monitor.post and bulk_write are blocking calls; the original body had
    # no await at all, so concurrent stop_tasks coroutines ran strictly
    # serially and blocked the event loop. Running them in the default
    # thread-pool executor lets the awaits actually overlap.
    resp = await loop.run_in_executor(
        None, monitor.post, '/api/v2/jobs/tasks/stop', request_args)
    if resp and resp.status_code == 200 and update_list:
        await loop.run_in_executor(None, db['jobs'].bulk_write, update_list)


@backup_warp(('jobs', ), logger, db, arrow_now)
def check_and_stop_task():
    """Find monitored jobs whose plan has expired (domain quota used but no
    remaining enabled-domain balance) and stop their tasks, flipping
    ``enableMonitor`` to False on success. The decorator backs up ``jobs``
    before any writes happen.
    """
    loop = asyncio.get_event_loop()

    # Join each enabled job to its balance record and keep only those whose
    # plan expired before `arrow_now` with domain usage but nothing enabled.
    pipeline = [
        {'$match': {'enableMonitor': True}},
        {'$lookup': {'from': 'balances', 'localField': 'uid', 'foreignField': 'uid', 'as': 'balances'}},
        {'$unwind': '$balances'},
        {'$match': {'balances.balance.domain.used': {'$ne': 0}, 'balances.balance.domainEnable.used': 0, 'balances.planExpireTime': {'$lt': arrow_now.datetime}}},
        {'$project': {'_id': 1, 'balance_id': '$balances._id', 'uid': 1}},
    ]

    pending = [
        loop.create_task(
            stop_tasks(doc['_id'],
                       [UpdateOne({'_id': doc['_id']}, {'$set': {'enableMonitor': False}})]))
        for doc in db.jobs.aggregate(pipeline)
    ]

    if pending:
        loop.run_until_complete(asyncio.wait(pending))
        # loop.close()
        # ioloop.close()


def main():
    """Migrate collect areas: for every verified job referencing
    ORIGIN_NODE in its http/ping settings, substitute NEW_NODE in place
    and push the rewritten settings to the scheduler via a worker pool.
    """
    # check_and_stop_task()

    workers = Pool(size=5)

    for kind in task_type_tuple:
        settings_key = f'{kind}Settings'
        query = {
            'verification.isVerified': True,
            settings_key: {'$elemMatch': {'collect.area': {'$in': [ORIGIN_NODE]}}},
        }

        # from bson import ObjectId
        # query.update({'_id': ObjectId('6169a0d7233d2b001b0d8670')})

        for job in db['jobs'].find(query):
            job_settings = job.get(settings_key, [])
            for entry in job_settings:
                areas = entry['collect']['area']
                # Replace only the first occurrence, mirroring list.index.
                if ORIGIN_NODE in areas:
                    areas[areas.index(ORIGIN_NODE)] = NEW_NODE

            payload = {
                "immediateExec": False,
                "jobId": str(job['_id']),
                "enableMonitor": job['enableMonitor'],
                settings_key: job_settings,
            }
            workers.apply_async(send, (kind, payload))

    workers.join()


# Script entry point: runs the node-migration pass (check_and_stop_task
# is currently disabled inside main()).
if __name__ == '__main__':
    main()
