# coding: utf-8

import json
import importlib
import collections

from django_redis import get_redis_connection
from daedalus.celery import app

from jobs.models import TaskLog, TaskConf, Task, TaskHostLog
from .. import constants as task_status

# Package that hosts one engine module per task type (imported in _execute).
BASE_MODULE = 'task_engine.engines'


class BaseWorker(object):
    """Run one task (identified by a TaskLog row) against a list of hosts.

    Workflow:
        1. Load the TaskLog row and its parent Task.
        2. Mark every target host's TaskHostLog as STARTED (creating rows
           for hosts that have none).
        3. Import the engine module matching the task type and execute it.
        4. Drain this run's Redis result queue into each host's log output.
        5. Derive and persist each host's final status from that output.
    """

    # Scheduler-level params stripped from TaskLog.params before the
    # remainder is forwarded to the engine as keyword arguments.
    EXCLUDE_FIELDS = ('timeout', 'hosts')

    def __init__(self, task_log_id, hosts):
        """Load the TaskLog/Task pair; missing rows become None (checked in run)."""
        self.task_log_id = task_log_id
        self.hosts = hosts
        try:
            self.task_log = TaskLog.objects.get(id=self.task_log_id)
        except TaskLog.DoesNotExist:
            self.task_log = None

        self.task = None
        # Guard: without a TaskLog there is no task_id to look up, and the
        # original unconditional lookup raised AttributeError on None.
        if self.task_log is not None:
            try:
                self.task = Task.objects.get(id=self.task_log.task_id)
            except Task.DoesNotExist:
                pass

    def init(self):
        """Mark every target host as started before execution."""
        self._init_hosts_status()

    def _execute(self):
        """Import the engine for this task type and run it.

        Returns a (status, output) tuple; status 'F' with a diagnostic
        message when the engine module or its run_task callable is missing.
        """
        execute_module = '{}.{}'.format(BASE_MODULE, self.task.type)
        try:
            # Engines live under task_engine/engines/<task.type>.py and
            # must expose a run_task callable.
            module = importlib.import_module(execute_module)
            func = getattr(module, 'run_task')
        except (ImportError, AttributeError) as e:
            return 'F', 'Import Module Error: {}'.format(e)
        context = {
            'task_name': self.task.name,
            'task_path': self.task.path,
            'task_hosts': self.hosts,
            'task_id': self.task_log.celery_task_id,
        }
        context.update(self.task_params)
        return func(**context)

    @property
    def task_params(self):
        """Extra engine kwargs parsed from TaskLog.params (JSON).

        Returns {} for empty, unparsable, or non-dict params; EXCLUDE_FIELDS
        are dropped because they are consumed by the scheduler, not the
        engine.
        """
        if not self.task_log.params:
            return {}
        try:
            kwargs = json.loads(self.task_log.params)
        except (ValueError, TypeError):
            return {}
        if not isinstance(kwargs, dict):
            return {}
        for field in self.EXCLUDE_FIELDS:
            kwargs.pop(field, None)
        return kwargs

    def finished(self):
        """Persist queued per-host results, then each host's final status."""
        self.receive_tasks()
        self.save_host_task_status()

    def run(self):
        """Full lifecycle: init -> execute -> collect results.

        Returns the engine's (status, output) pair, or ('F', <message>)
        when the TaskLog or Task row is missing.
        """
        if not self.task_log:
            return 'F', 'Task Log Does Not Exist'
        if not self.task:
            # _execute dereferences self.task; fail early instead of
            # raising AttributeError.
            return 'F', 'Task Does Not Exist'
        self.init()
        status, output = self._execute()
        self.finished()
        return status, output

    @staticmethod
    def task_host_status(host_task, status):
        """Set and persist *status* on a TaskHostLog; no-op for other types."""
        if isinstance(host_task, TaskHostLog):
            host_task.status = status
            host_task.save()

    def _init_hosts_status(self):
        """Flip every target host's log row to STARTED, creating missing rows.

        A host whose existing row is not in QUEUE state was already picked
        up elsewhere, so it is marked FAILURE instead of being restarted.
        """
        for host in self.hosts:
            host_task = TaskHostLog.objects.filter(
                task_log_id=self.task_log_id, host=host).first()
            if not host_task:
                self._create_host_task_log(host)
            elif host_task.status != task_status.QUEUE:
                self.task_host_status(host_task, task_status.FAILURE)
            else:
                self.task_host_status(host_task, task_status.STARTED)

    def _create_host_task_log(self, host):
        """Create a STARTED TaskHostLog bound to this run's result queue."""
        TaskHostLog(task_log_id=self.task_log_id, host=host,
                    status=task_status.STARTED,
                    result_queue_id=self.task_log.celery_task_id).save()

    def save_host_task_status(self):
        """Persist the derived final status for every target host's log row."""
        for host in self.hosts:
            host_task = TaskHostLog.objects.filter(
                task_log_id=self.task_log_id, host=host).first()
            if host_task:
                host_task.status = self.host_task_status(host_task)
                host_task.save()

    @staticmethod
    def host_task_status(host_task):
        """Derive a host's final status from its recorded output.

        The output is expected to be JSON of the form::

            {'task_name': {'status': '<status>', 'output': '<output>'}, ...}

        Returns SUCCESS only when at least one well-formed entry exists and
        every entry's status is in SUCCESS_STATUS; FAILURE otherwise
        (unparsable/empty output, or a host already in a finished state).
        """
        status = task_status.FAILURE
        if host_task.status not in task_status.FINISHED_STATUS and host_task.output:
            try:
                tasks = json.loads(host_task.output)
            except (ValueError, TypeError):
                tasks = {}
            status_set = set(
                data['status'] for data in tasks.values()
                if isinstance(data, dict) and 'status' in data and 'output' in data
            )
            # An empty set is vacuously a subset of anything; require at
            # least one valid entry before declaring SUCCESS.
            if status_set and status_set.issubset(task_status.SUCCESS_STATUS):
                status = task_status.SUCCESS
        return status

    def receive_tasks(self):
        """Drain this run's Redis result queue into TaskHostLog.output.

        Each queued event is a JSON object carrying at least 'host'; events
        are grouped per host and merged (keyed by task name) into the host
        log's existing JSON output. Malformed events are skipped so draining
        always completes.
        """
        conn = get_redis_connection("task_logs")
        grouped = collections.defaultdict(list)
        event = conn.lpop(self.task_log.celery_task_id)
        while event:
            try:
                parsed = json.loads(event)
                grouped[parsed['host']].append(parsed)
            except (ValueError, TypeError, KeyError):
                pass
            event = conn.lpop(self.task_log.celery_task_id)

        for host, events in grouped.items():
            host_task = TaskHostLog.objects.filter(
                task_log_id=self.task_log_id, host=host).first()
            if not host_task:
                continue
            try:
                output = json.loads(host_task.output)
            except (ValueError, TypeError):
                output = {}
            if not isinstance(output, dict):
                output = {}
            # Store dicts with 'status'/'output' keys (not tuples) so
            # host_task_status can actually read the entries back.
            output.update({
                ev.get('name', 'default'): {
                    'status': ev.get('status', u'F'),
                    'output': ev.get('output', u'Receive not has key output'),
                }
                for ev in events
            })
            host_task.output = json.dumps(output)
            host_task.save()


def run_task(task_log_id, hosts):
    """Run the task recorded in TaskLog *task_log_id* against *hosts*.

    Thin module-level entry point: builds a BaseWorker and delegates to its
    run() method, returning the resulting (status, output) pair.
    """
    status, output = BaseWorker(task_log_id=task_log_id, hosts=hosts).run()
    return status, output


def test():
    """Manual smoke test: run task log 4 against two lab hosts and print the result."""
    status, output = run_task(task_log_id=4, hosts=['172.16.1.115', '172.16.1.123'])
    # Single-argument parenthesized print is equivalent under Python 2 and
    # valid Python 3, unlike the original bare print statements.
    print(output)
    print(status)
