# coding=utf-8
import Queue
import collections
import multiprocessing
import shutil
import traceback
from abc import abstractmethod
from threading import Thread

import filelock

from app.modules.common.port import protected_port_group_pool
from utils import *
from .utils import serialize, deserialize, LogProxy

log = logging.getLogger(__name__)


class Pipeline(object):
    """A named, ordered sequence of tasks run by the pipeline engine.

    Lifecycle state (status, progress, timestamps, error message) is updated
    during execution and reported back to the engine through a
    multiprocessing return queue supplied in the execution context.
    """

    def __init__(self, name, group = None, timeout = None, **kwargs):
        """
        Initialize executor to be executed

        name    - pipeline name
        group   - pipeline group; pipelines sharing a group share a file lock
        timeout - execution timeout in seconds (None = use the pool default)
        kwargs  - initial pipeline parameters, merged with start parameters
        """
        # Properties specified by the input parameters -------------------------
        self.running_num = None          # run sequence number assigned by the engine
        self.name = name
        self.group = group
        self.pid = None                  # pid of the worker process executing this run
        self.status = PipelineStatus.NEW
        self.lasted_started = None
        self.lasted_finished = None
        self.progress = 0                # completion percentage, 0-100
        self.err_msg = None
        self.timeout = timeout
        self.__params = kwargs
        self.__tasks = []
        self.app = None

    def params(self):
        """Return the pipeline parameter dict."""
        return self.__params

    def tasks(self):
        """Return the list of registered tasks.

        BUGFIX: the original returned ``self.__tasks()``, which tried to
        call the list object and always raised TypeError.
        """
        return self.__tasks

    def add_task(self, task):
        """Append a task and back-link it to this pipeline."""
        task.pipeline = self
        self.__tasks.append(task)

    def set_app(self, app):
        self.app = app

    def _before_execute(self):
        """Hook run before the tasks; override in subclasses."""
        pass

    def _after_execute(self):
        """Hook run after the tasks (always, even on failure)."""
        pass

    def execute(self, *args, **kwargs):
        """Run all tasks in order, firing started/progress/done/failed events."""
        self.__init_params(*args, **kwargs)
        progress = 0
        try:
            self._before_execute()
            self.__fire_onstarted()
            for task in self.__tasks:
                task.execute(self.__params)
                progress += 1
                # BUGFIX: under Python 2, ``progress / len(...)`` was integer
                # division, so every intermediate progress value was 0.
                self.__fire_progress_changed(progress * 100 // len(self.__tasks))
        except BaseException as ex:
            log.error(traceback.format_exc())
            # NOTE: ``.message`` is a Python-2-only exception attribute.
            self.__fire_onfailed(err_msg = ex.message)
        else:
            self.__fire_ondone()
        finally:
            self._after_execute()

    def stop(self, force = False):
        """Stop the pipeline.

        ``force`` is accepted (and currently ignored) for compatibility with
        ``PipelineEngine.stop_pipeline``, which calls ``stop(True)``; the
        original signature made that call raise TypeError.
        NOTE(review): ``terminate`` is not defined on Pipeline itself —
        presumably supplied by a subclass or mixin; confirm before relying
        on this method.
        """
        self.terminate()

    def __init_params(self, *args, **kwargs):
        """Bind the return queue and merge start parameters into params."""
        context = kwargs['context']
        self.__return_queue = context.return_queue
        pipeline_params = kwargs['pipeline_params']
        self.__params.update(pipeline_params)

    def __fire_onstarted(self):
        """Mark RUNNING, stamp the start time and emit an on-started event."""
        log.info(u"管道 '%s'运行开始。" % self.name)
        self.status = PipelineStatus.RUNNING
        self.lasted_started = datetime.now()
        self.lasted_finished = None
        self.__put_to_return_queue(PipelineOnStartedEvent(self))

    def __fire_ondone(self):
        """Mark DONE, stamp the finish time and emit an on-done event."""
        log.info(u"管道 '%s'运行完成。" % self.name)
        self.status = PipelineStatus.DONE
        self.lasted_finished = datetime.now()
        self.__put_to_return_queue(PipelineOnDoneEvent(self))

    def __fire_onfailed(self, err_msg):
        """Mark FAILED, record the error and emit an on-failed event."""
        self.status = PipelineStatus.FAILED
        self.err_msg = err_msg
        log.error(u"管道 '%s'运行失败。error-msg:%s" % (self.name, self.err_msg))
        self.__put_to_return_queue(PipelineOnFailedEvent(self, self.err_msg))

    def __fire_progress_changed(self, progress):
        """Record the new progress percentage (event emission disabled)."""
        log.info(u"管道 '%s'的进度发生变化，进度为%d" % (self.name, progress))
        self.progress = progress

    def __put_to_return_queue(self, event):
        """Best-effort: serialize the event onto the shared return queue."""
        try:
            self.__return_queue.put(serialize(event), block = True, timeout = 1)
            log.info(u"数据置入返回队列,返回队列大小为%d" % self.__return_queue.qsize())
        except BaseException:
            # Never let reporting failures kill the pipeline process.
            log.error(u'数据置入返回队列发生错误.' + traceback.format_exc())

    def __repr__(self):
        return (
            "<{class_name}("
            "running_num=\"{self.running_num}\","
            "name=\"{self.name}\","
            "pid=\"{self.pid}\","
            "status=\"{self.status}\","
            "progress=\"{self.progress}\","
            "err_msg=\"{self.err_msg}\""
            ")>".format(
                class_name = self.__class__.__name__,
                self = self
            )
        )

    def to_dict(self):
        """Serialize run metadata to a plain dict (timestamps formatted)."""
        return {
            'name': self.name,
            'running_num': self.running_num,
            'group': self.group,
            'status': self.status,
            'pid': self.pid,
            'progress': self.progress,
            'lasted_started': self.lasted_started.strftime('%Y-%m-%d %H:%M:%S') if self.lasted_started else None,
            'lasted_finished': self.lasted_finished.strftime('%Y-%m-%d %H:%M:%S') if self.lasted_finished else None,
            'err_msg': self.err_msg
        }

    def get_log(self, _log = None):
        """Return a LogProxy tagging records with this pipeline's name."""
        if not _log:
            _log = log
        return LogProxy.get_log_proxy(_log, self.name)


class PipelineCallback(object):
    """Callback interface for pipeline lifecycle notifications.

    Subclasses override only the hooks they care about; every default
    implementation is a no-op.
    """

    def on_pipeline_started(self, pipeline):
        """Invoked when a pipeline begins running."""
        pass

    def on_pipeline_done(self, pipeline):
        """Invoked when a pipeline completes successfully."""
        pass

    def on_pipeline_failed(self, pipeline, error):
        """Invoked when a pipeline fails with the given error."""
        pass


class PipelineFactory(object):
    """Factory interface producing Pipeline instances for bulk registration."""

    # NOTE(review): @abstractmethod has no enforcement effect here because
    # the class does not use ABCMeta as its metaclass; subclasses are not
    # actually forced to override this method.
    @abstractmethod
    def build_pipelines(self):
        """Return an iterable of Pipeline objects to register."""
        pass


class PipelineEngine(object):
    """Facade over the pipeline subsystem: registration, start/stop,
    shutdown and event subscription.

    Pipelines are queued onto a worker pool; their child processes report
    state back through a shared return queue watched by the state manager.
    """

    TIMEOUT = 600        # default per-pipeline execution timeout, seconds
    LOCK_TIMEOUT = 10    # file-lock acquisition timeout, seconds

    def __init__(self, concurrency = 1, qmax = 1000):
        """
        concurrency - number of worker threads executing pipelines
        qmax        - maximum length of the pending-pipeline queue
        """
        self.concurrency = concurrency
        self.t = None
        self.qmax = qmax if qmax is not None else 1000
        self.__pipeline_registry = PipelineRegistry()
        self.__pipeline_state_manager = PipelineStateManager(self)

        self.__pool = ProcessPool(
            max_workers = concurrency,
            max_queue_len = self.qmax,
            timeout = PipelineEngine.TIMEOUT,
            lock_timeout = PipelineEngine.LOCK_TIMEOUT,
            callback = self.__pipeline_state_manager
        )
        self.__pipelin_seq_generator = PipelineSeqGenerator()
        self.__pipeline_lock_factory = PipelineLockFactory()
        self.__pipeline_lock_factory.init()

    def shutdown(self):
        """Release the pool, the registry and the state manager."""
        self.__pool.shutdown()
        self.__pipeline_registry.shutdown()
        self.__pipeline_state_manager.shutdown()

    def register_pipeline(self, *args, **kwargs):
        """Register a pipeline.

        Accepted forms:
            register_pipeline(name, pipeline)
            register_pipeline(pipeline)
            register_pipeline(name = ..., pipeline = ...)
            register_pipeline(pipeline = ...)
        Raises TypeError for anything else.
        """
        if len(args) > 0:
            # BUGFIX: guard on len(args) before touching args[1]; the
            # original raised IndexError for register_pipeline("name_only")
            # instead of the intended TypeError.
            if len(args) >= 2 and isinstance(args[0], str) and isinstance(args[1], Pipeline):
                pipeline_name = args[0]
                pipeline = args[1]
            elif isinstance(args[0], Pipeline):
                pipeline = args[0]
                pipeline_name = pipeline.name
            else:
                raise TypeError("invalid parameter.")
        else:
            # Idiom: `in` instead of the Python-2-only dict.has_key().
            if 'name' in kwargs and 'pipeline' in kwargs:
                pipeline_name = kwargs['name']
                pipeline = kwargs['pipeline']
            elif 'pipeline' in kwargs:
                pipeline = kwargs['pipeline']
                pipeline_name = pipeline.name
            else:
                raise TypeError("invalid parameter.")
        self.__pipeline_registry.add_pipeline(pipeline_name, pipeline)

    def register_pipeline_factory(self, pipeline_factory):
        """Register a factory; all pipelines it builds are added at once."""
        self.__pipeline_registry.add_pipeline_factory(pipeline_factory)

    def get_pipeline(self, pipeline_name):
        """Return the named pipeline, or None if not registered."""
        return self.__pipeline_registry.get_pipeline(pipeline_name)

    def remove_pipeline(self, pipeline_name):
        """Drop the named pipeline from the registry."""
        self.__pipeline_registry.remove_pipeline(pipeline_name)

    def get_watch_return_queue_thread_status(self):
        """Return True while the state manager's watcher thread is alive."""
        return self.__pipeline_state_manager.get_watch_return_queue_thread_status()

    def restart_watch_return_queue_thread_status(self):
        """Restart the state manager's return-queue watcher thread."""
        return self.__pipeline_state_manager.restart_watch_return_queue_thread()

    def available_pipelines(self):
        """Return all registered pipelines (alias of pipelines())."""
        return self.__pipeline_registry.piplines()

    def pipelines(self):
        """Return all registered pipelines."""
        return self.__pipeline_registry.piplines()

    def start_pipeline(self, pipeline_name, pipeline_params = None):
        """Queue the named pipeline for execution; returns its running_num.

        Raises PipelineNotFoundError for unknown names and
        PipelineDuplicateStartError if it is already pending or running.
        """
        # BUGFIX: avoid the shared mutable default argument ({}).
        if pipeline_params is None:
            pipeline_params = {}
        pipeline = self.__pipeline_registry.get_pipeline(pipeline_name)
        if pipeline is None:
            raise PipelineNotFoundError(pipeline_name)
        # Do not start a pipeline twice while it is pending or running.
        if pipeline.status == PipelineStatus.PENDING or pipeline.status == PipelineStatus.RUNNING:
            log.error(u"管道 '%s'已经启动，不能重复启动." % pipeline.name)
            raise PipelineDuplicateStartError(pipeline_name)

        self.__init_pipeline(pipeline)
        log.info(u"管道 '%s'已初始化并提交到进程池." % pipeline.name)
        self.__pool.submit_pipeline(
            pipeline = pipeline,
            pipeline_params = pipeline_params,
            context = self.__build_context(
                pipeline_lock_factory = self.__pipeline_lock_factory,
                return_queue = self.__pipeline_state_manager.get_return_queue()
            ),
        )
        return pipeline.running_num

    def stop_pipeline(self, pipeline_name):
        """Stop the named pipeline (PipelineNotFoundError if unknown)."""
        pipeline = self.__pipeline_registry.get_pipeline(pipeline_name)
        # Idiom: `is None` instead of `== None`.
        if pipeline is None:
            raise PipelineNotFoundError(pipeline_name)
        # BUGFIX: Pipeline.stop() accepts no positional flag; the original
        # stop(True) always raised TypeError.
        pipeline.stop()

    def locks(self):
        # NOTE(review): self.__locks is never assigned anywhere in this
        # class, so this accessor has always raised AttributeError; kept
        # unchanged pending clarification of the intended source.
        return self.__locks

    def pipeline_offset(self, pipeline_running_num):
        """Return the pending-queue position for the given run number."""
        return self.__pool.pipeline_offset(pipeline_running_num)

    def subscribe(self, listner):
        """
        Adds observer to notification list.
        """
        self.__pipeline_state_manager.subscribe(listner)

    def unsubscribe(self, listner):
        """
        Removes observer from notification list.
        """
        self.__pipeline_state_manager.unsubscribe(listner)

    def __init_pipeline(self, pipeline):
        """Reset run state and assign a fresh running_num before submission."""
        pipeline.status = PipelineStatus.READY
        pipeline.running_num = self.__pipelin_seq_generator.next_id()
        pipeline.progress = 0
        pipeline.err_msg = None
        pipeline.lasted_started = None
        pipeline.lasted_finished = None
        pipeline.pid = None

    def on_pipeline_failed(self, pipeline, error):
        """Mark the pipeline FAILED and notify listeners.

        NOTE(review): reads ``error.message`` — assumes ``error`` is an
        exception object (Python-2 attribute); confirm against callers.
        """
        pipeline.status = PipelineStatus.FAILED
        pipeline.err_msg = error.message
        self.__pipeline_state_manager.notify(PipelineOnFailedEvent(pipeline, pipeline.err_msg))

    def on_pipeline_msg(self, pipeline, message):
        """Record an informational message and notify listeners."""
        pipeline.err_msg = message
        self.__pipeline_state_manager.notify(ContainerAppEvent(pipeline, message))

    def __build_context(self, pipeline_lock_factory, return_queue):
        """Assemble the execution context handed to the pipeline process."""
        return PipelineContext(pipeline_lock_factory = pipeline_lock_factory, return_queue = return_queue)


class PipelineContext(object):
    """Execution context handed to a pipeline process.

    Bundles the file-lock factory with the multiprocessing return queue
    used to report events back to the engine.
    """

    def __init__(self, pipeline_lock_factory, return_queue):
        self.pipeline_lock_factory = pipeline_lock_factory
        self.return_queue = return_queue


class PipelineStatus(object):
    """Pipeline lifecycle states as plain string constants."""

    NEW = 'NEW'          # created, never started
    READY = 'READY'      # registered or reset, ready to be queued
    PENDING = 'PENDING'  # queued, waiting for a worker
    RUNNING = 'RUNNING'  # executing in a child process
    DONE = 'DONE'        # finished successfully
    FAILED = 'FAILED'    # finished with an error


class PipelineNotFoundError(RuntimeError):
    """Raised when a pipeline name is not present in the registry."""

    def __init__(self, pipeline_name, *args, **kwargs):
        RuntimeError.__init__(self, *args, **kwargs)
        self.pipe_name = pipeline_name


class PipelineDuplicateStartError(RuntimeError):
    """Raised when starting a pipeline that is already pending or running."""

    def __init__(self, pipeline_name, *args, **kwargs):
        RuntimeError.__init__(self, *args, **kwargs)
        self.pipe_name = pipeline_name


class PipelineExecuteError(RuntimeError):
    """Raised when a pipeline's task execution fails."""

    def __init__(self, pipeline, err_msg, *args, **kwargs):
        RuntimeError.__init__(self, *args, **kwargs)
        self.pipeline = pipeline
        self.err_msg = err_msg

    def __str__(self):
        template = (
            "<{class_name}("
            "pipeline=\"{self.pipeline.name}\","
            "err_msg=\"{self.err_msg}\""
            ")>"
        )
        return template.format(class_name = self.__class__.__name__, self = self)

    def to_dict(self):
        """Serialize to a one-key dict suitable for an API error payload."""
        msg = "pipeline \"{self.pipeline.name}\" execution error,the err_msg is {self.err_msg}.".format(
            self = self)
        return {"msg": msg}


class PipelineTimeoutError(RuntimeError):
    """Raised when a pipeline process outlives its allowed execution time."""

    def __init__(self, pipeline, *args, **kwargs):
        RuntimeError.__init__(self, *args, **kwargs)
        self.pipeline = pipeline

    def __str__(self):
        template = (
            "<{class_name}("
            "pipeline=\"{self.pipeline.name}\""
            ")>"
        )
        return template.format(class_name = self.__class__.__name__, self = self)

    def to_dict(self):
        """Serialize to a one-key dict suitable for an API error payload."""
        return {"msg": "pipeline \"{self.pipeline.name}\" execution timed out.".format(self = self)}


class PipelineAbortedError(RuntimeError):
    """Raised when a pipeline process exits abnormally (e.g. killed)."""

    def __init__(self, pipeline, exitcode, *args, **kwargs):
        RuntimeError.__init__(self, *args, **kwargs)
        self.pipeline = pipeline
        self.exitcode = exitcode

    def __str__(self):
        template = (
            "<{class_name}("
            "pipeline=\"{self.pipeline.name}\","
            "exitcode=\"{self.exitcode}\""
            ")>"
        )
        return template.format(class_name = self.__class__.__name__, self = self)

    def to_dict(self):
        """Serialize to a one-key dict suitable for an API error payload."""
        msg = "pipeline \"{self.pipeline.name}\" execution failed,the exitcode is {self.exitcode}.".format(
            self = self)
        return {"msg": msg}


class PipelineEvent(object):
    """Base class for all pipeline events; carries the emitting pipeline."""

    def __init__(self, pipeline):
        super(PipelineEvent, self).__init__()
        self.pipeline = pipeline

    @abstractmethod
    def to_dict(self):
        """Serialize the event to a plain dict (default: the pipeline's)."""
        return self.pipeline.to_dict()

    def __str__(self):
        template = (
            "<{class_name}("
            "pipeline=\"{self.pipeline}\""
            ")>"
        )
        return template.format(class_name = self.__class__.__name__, self = self)


class PipelineStatusEvent(PipelineEvent):
    """Marker base class for events describing pipeline status changes."""

    def __init__(self, pipeline):
        super(PipelineStatusEvent, self).__init__(pipeline)


class PipelineOnStartedEvent(PipelineStatusEvent):
    """Event emitted when a pipeline starts running."""

    def __init__(self, pipeline):
        super(PipelineOnStartedEvent, self).__init__(pipeline)

    def to_dict(self):
        """Tag the pipeline snapshot with the event name."""
        return {
            'event': 'pipeline_on_started',
            'pipeline': self.pipeline.to_dict()
        }


class PipelineOnDoneEvent(PipelineStatusEvent):
    """Event emitted when a pipeline finishes successfully."""

    def __init__(self, pipeline):
        super(PipelineOnDoneEvent, self).__init__(pipeline)

    def to_dict(self):
        """Tag the pipeline snapshot with the event name."""
        return {
            'event': 'pipeline_on_done',
            'pipeline': self.pipeline.to_dict()
        }


class PipelineOnFailedEvent(PipelineStatusEvent):
    """Event emitted when a pipeline fails; carries the error message."""

    def __init__(self, pipeline, err_msg):
        super(PipelineOnFailedEvent, self).__init__(pipeline)
        self.err_msg = err_msg

    def to_dict(self):
        """Tag the pipeline snapshot with the event name and error message."""
        return {
            'event': 'pipeline_on_failed',
            'pipeline': self.pipeline.to_dict(),
            'err_msg': self.err_msg
        }


class PipelineProgressChangedEvent(PipelineStatusEvent):
    """Event emitted when a pipeline's completion percentage changes."""

    def __init__(self, pipeline, progress):
        super(PipelineProgressChangedEvent, self).__init__(pipeline)
        self.progress = progress

    def to_dict(self):
        """Tag the pipeline snapshot with the event name and progress."""
        return {
            'event': 'pipeline_progress_changed',
            'pipeline': self.pipeline.to_dict(),
            'progress': self.progress
        }


class PipelineRegisterEvent(PipelineEvent):
    """Event emitted when a pipeline is registered with the engine."""

    def __init__(self, pipeline):
        super(PipelineRegisterEvent, self).__init__(pipeline)


class PipelineUnRegisterEvent(PipelineEvent):
    """Event emitted when a pipeline is removed from the engine."""

    def __init__(self, pipeline):
        super(PipelineUnRegisterEvent, self).__init__(pipeline)


class PipelineListner(object):
    """Base observer for pipeline events, identified uniquely by name."""

    def __init__(self, name = "PipelineListner"):
        """
        Creates Observer.
        An Observer is uniquely defined by its name. Default name is
        'PipelineListner'.
        (BUGFIX: the original docstring claimed the default was 'observer'
        and contained the typo 'vDefault'.)
        """
        if not isinstance(name, str):
            raise TypeError("Expected string for name")
        self.name = name

    def on_event(self, event):
        """Default handler: dump the event to stdout; subclasses override."""
        # Parenthesized form prints identically under Python 2 (single
        # value) and is valid Python 3, unlike the original print statement.
        print(str(event))


class PipelineStatusListner(PipelineListner):
    """Listener that receives pipeline status-change events."""

    def __init__(self, name = "PipelineStatusListner"):
        PipelineListner.__init__(self, name)


class PipelineRegisterListner(PipelineListner):
    """Listener that receives pipeline register/unregister events."""

    def __init__(self, name = "PipelineRegisterListner"):
        PipelineListner.__init__(self, name)


class PipelineQueueElement(object):
    """Pairs a queued item with its monotonically increasing queue index."""

    def __init__(self, item, index):
        self.item = item
        self.index = index


class PipelineQueue(object):
    """FIFO of (pipeline, args, kwargs) tuples that tracks queue positions.

    ``total`` counts every item ever enqueued; ``current_index`` records the
    index of the most recently dequeued element, so ``offset`` can report
    how far a given pipeline run is from the front of the queue.
    """

    def __init__(self, maxsize = 0):
        self.current_index = 0   # index of the most recently dequeued element
        self.total = 0           # total number of elements ever enqueued
        self.__queue = Queue.Queue(maxsize)
        self.__pipelines = collections.OrderedDict()  # NOTE(review): written nowhere; kept for compatibility
        self.__indexes = {}      # pipeline running_num -> queue index

    def get(self, block = True, timeout = None):
        """Dequeue the next item, recording its position in current_index."""
        element = self.__queue.get(block, timeout)
        self.current_index = element.index
        return element.item

    def put(self, item, block = True, timeout = None):
        """Enqueue an item (a tuple whose first member is the pipeline)."""
        self.total += 1
        item_index = self.total
        key = self.__get_pipeline_running_num(item)
        self.__indexes[key] = item_index
        self.__queue.put(PipelineQueueElement(item, item_index), block, timeout)
        # BUGFIX: dropped the original's trailing no-op ``self.__pipelines``
        # expression statement.

    def __get_pipeline_running_num(self, item):
        """Extract the running_num of the pipeline inside a queued tuple."""
        pipeline = item[0]
        return pipeline.running_num

    def offset(self, pipeline_running_num):
        """Return how many positions the given run is behind the most
        recently dequeued element, or None if the run is unknown.

        BUGFIX: the original looked up ``repr(pipeline_running_num)`` while
        ``put`` stored the raw running_num, so the lookup always missed;
        its trailing comma also wrapped every result in a 1-tuple.
        """
        index = self.__indexes.get(pipeline_running_num)
        return index - self.current_index if index is not None else None

    def full(self):
        return self.__queue.full()

    def empty(self):
        return self.__queue.empty()

    def task_done(self):
        return self.__queue.task_done()

    def join(self):
        self.__queue.join()

    def qsize(self):
        return self.__queue.qsize()


class PipelineRegistry(object):
    """Name-indexed registry of pipelines, preserving registration order."""

    def __init__(self):
        self.__pipelines = collections.OrderedDict()
        self.__pipeline_factories = []

    def add_pipeline(self, pipeline_name, pipeline):
        """Register a pipeline under pipeline_name; duplicates are ignored.

        A newly registered pipeline is marked READY.
        """
        # BUGFIX/idiom: the original tested membership twice (__contains__
        # plus the Python-2-only has_key) with identical semantics; one
        # `in` test suffices.
        if str(pipeline_name) in self.__pipelines:
            log.warn(u"任务%s已存在，不需要重复注册" % pipeline_name)
            return
        if pipeline:
            self.__pipelines[pipeline_name] = pipeline
            pipeline.status = PipelineStatus.READY

    def get_pipeline(self, pipeline_name):
        """Return the pipeline registered under pipeline_name, or None."""
        return self.__pipelines.get(pipeline_name)

    def remove_pipeline(self, pipeline_name):
        """Remove the named pipeline (raises KeyError if absent)."""
        self.__pipelines.pop(pipeline_name)

    def piplines(self):
        """Return all registered pipelines in registration order.

        (The 'piplines' spelling is kept for caller compatibility.)
        """
        return self.__pipelines.values()

    def add_pipeline_factory(self, pipeline_factory):
        """Register a factory and immediately add every pipeline it builds."""
        self.__pipeline_factories.append(pipeline_factory)
        pipelines = pipeline_factory.build_pipelines()
        for pipeline in pipelines:
            self.add_pipeline(pipeline.name, pipeline)

    def shutdown(self):
        """Forget all pipelines and factories."""
        self.__pipelines.clear()
        self.__pipeline_factories = None


class PipelineStateManager(PipelineCallback):
    def __init__(self, pipeline_engine):
        self.__pipeline_engine = pipeline_engine
        self.__manager = multiprocessing.Manager()
        self.__return_queue = self.__manager.Queue()
        self.__pipeline_status_listers = []
        self.__pipeline_register_listners = []
        self.__start_watch_return_queue_thread()

    def get_return_queue(self):
        return self.__return_queue

    def get_watch_return_queue_thread_status(self):
        alive = self.__watch_return_queue_thread.isAlive()
        # if not alive:
        #    self.__start_watch_return_queue_thread()
        return alive

    def restart_watch_return_queue_thread(self):
        self.__start_watch_return_queue_thread()

    def shutdown(self):
        self.__pipeline_register_listners = None
        self.__pipeline_status_listers = None

    def subscribe(self, listner):
        """
        Adds observer to notification list.
        """
        if not isinstance(listner, PipelineListner):
            raise TypeError("Subscriber must be PipelineListner")
        if isinstance(listner, PipelineRegisterListner):
            self.__pipeline_register_listners.append(listner)
        if isinstance(listner, PipelineStatusListner):
            self.__pipeline_status_listers.append(listner)

    def unsubscribe(self, listner):
        """
        Removes observer from notification list.
        """
        if not isinstance(listner, PipelineListner):
            raise TypeError("Unsubscriber must be PipelineListner")

        if not (listner in self.__pipeline_register_listners):
            self.__pipeline_register_listners.remove(listner)

        if not (listner in self.__pipeline_status_listers):
            self.__pipeline_status_listers.remove(listner)

    @async
    def notify(self, pipeline_event):
        """
        Sends pipeline event to subscribed listner
        """
        if isinstance(pipeline_event, PipelineStatusEvent):
            for listner in self.__pipeline_status_listers:
                try:
                    _pipe_dict = pipeline_event.to_dict()
                    _log_pipe_dict = json.dumps(_pipe_dict, encoding = "UTF-8", ensure_ascii = False)
                    log.info(
                        u"管道%s产生事件%s，开始通知监听者%s..."
                        % (pipeline_event.pipeline.name, _log_pipe_dict, str(listner))
                    )
                    start_time = datetime.now()
                    listner.on_event(pipeline_event)
                    end_time = datetime.now()
                    time = (end_time - start_time).seconds
                    if time > 5:
                        log.warn(u"通知监控者%s,事件数据%s，耗时%d秒" % (str(listner), _log_pipe_dict, time))
                    log.info(u"事件通知监听者完成.")
                except:
                    log.error(traceback.format_exc())
                    log.warn("Sends pipeline event to subscribed listner error.")
        elif isinstance(pipeline_event, PipelineRegisterEvent):
            for listner in self.__pipeline_register_listners:
                try:
                    listner.on_event(pipeline_event)
                except:
                    log.error(traceback.format_exc())
                    log.warn("Sends pipeline event to subscribed listner error.")

    def __start_watch_return_queue_thread(self):
        self.__watch_return_queue_thread = Thread(target = self.__watch_pipeline_return_queue,
                                                  args = ())
        self.__watch_return_queue_thread.daemon = True
        self.__watch_return_queue_thread.start()

    def __watch_pipeline_return_queue(self):
        while True:
            try:
                str = self.__return_queue.get(block = True, timeout = 60)
                log.info(u"从返回队列中获取到数据,返回队列大小为%d" % self.__return_queue.qsize())
                obj = deserialize(str)
                if isinstance(obj, PipelineEvent):
                    pipeline = self.__pipeline_engine.get_pipeline(obj.pipeline.name)
                    if pipeline:
                        if isinstance(obj, PipelineOnStartedEvent):
                            self.on_pipeline_started(pipeline)
                        if isinstance(obj, PipelineOnDoneEvent):
                            self.on_pipeline_done(pipeline)
                        if isinstance(obj, PipelineOnFailedEvent):
                            self.on_pipeline_failed(pipeline, obj.err_msg)
                        if isinstance(obj, PipelineProgressChangedEvent):
                            self.on_pipeline_progress_changed(pipeline, obj.progress)
            except Queue.Empty:
                log.info(u"返回队列为空！")
                # time.sleep(1)
                continue
            except:
                log.error(u"事件数据处理失败")
                log.error(traceback.format_exc())

    def on_pipeline_started(self, pipeline):
        log.info("The pipeline '%s' is running" % pipeline.name)
        pipeline.status = PipelineStatus.RUNNING
        pipeline.lasted_started = datetime.now()
        pipeline.lasted_finished = None
        self.notify(PipelineOnStartedEvent(pipeline))

    def on_pipeline_done(self, pipeline):
        log.info("The pipeline '%s' is done." % pipeline.name)
        pipeline.status = PipelineStatus.DONE
        pipeline.lasted_finished = datetime.now()
        self.notify(PipelineOnDoneEvent(pipeline))

    def on_pipeline_failed(self, pipeline, err_msg):
        pipeline.status = PipelineStatus.FAILED
        pipeline.err_msg = err_msg
        log.warn("The pipeline '%s' failed." % pipeline.name)
        self.notify(PipelineOnFailedEvent(pipeline, err_msg))

    def on_pipeline_msg(self, pipeline, message):
        pipeline.err_msg = message
        self.notify(ContainerAppEvent(pipeline, message))

    def on_pipeline_progress_changed(self, pipeline, progress):
        log.info("The pipeline '%s' progress changed to %d." % (pipeline.name, progress))
        pipeline.progress = progress
        self.notify(PipelineProgressChangedEvent(pipeline, progress))


class ProcessPool(object):
    """Pool of worker threads, each executing queued pipelines in child
    processes.

    (Made a new-style class for consistency with the rest of the module.)
    """

    def __init__(self, max_workers = 2, max_queue_len = 1000, timeout = 0, lock_timeout = 1, callback = None):
        """
        max_workers   - worker thread count (None = CPU count)
        max_queue_len - capacity of the pending queue
        timeout       - default pipeline execution timeout (0 = infinite)
        lock_timeout  - file-lock acquisition timeout per pipeline
        callback      - PipelineCallback notified of failures/messages
        """
        if max_workers is None:
            self.max_workers = multiprocessing.cpu_count()
        else:
            self.max_workers = max_workers
        self.max_queue_len = max_queue_len
        self.timeout = timeout
        self.lock_timeout = lock_timeout
        self.__callback = callback
        # BUGFIX: pipeline_offset() calls .offset(), which plain Queue.Queue
        # does not provide (it always raised AttributeError); use the
        # position-tracking PipelineQueue wrapper as originally intended.
        self.__pipeline_queue = PipelineQueue(max_queue_len)  # pending-pipeline queue
        self.__workers = []
        self.__create_pool()

    def __create_pool(self):
        """Spawn the worker threads (each starts consuming immediately)."""
        for i in xrange(self.max_workers):
            self.__workers.append(
                WorkThread(self.__pipeline_queue, timeout = self.timeout, lock_timeout = self.lock_timeout,
                           callback = self.__callback))

    def pipeline_offset(self, pipeline_running_num):
        """Return the queue position of the given run relative to the most
        recently dequeued element (None if unknown)."""
        return self.__pipeline_queue.offset(pipeline_running_num)

    def submit_pipeline(self, pipeline, *args, **kwargs):
        """Mark the pipeline PENDING and enqueue it; returns the queue size.

        NOTE(review): put() blocks when the queue is full (no timeout), so
        the Queue.Full handler below is effectively unreachable as written.
        """
        try:
            pipeline.status = PipelineStatus.PENDING
            self.__pipeline_queue.put((pipeline, args, kwargs))
            log.info(u"管道 '%s'已提交到执行队列." % pipeline.name)
        except Queue.Full:
            log.error(traceback.format_exc())
            raise  # surface a full queue instead of executing
        return self.__pipeline_queue.qsize()

    def is_safe(self):
        """True while the pending queue is comfortably below capacity."""
        return self.__pipeline_queue.qsize() < 0.9 * self.max_queue_len

    def shutdown(self):
        """Drop the queue and worker references (daemon threads die with
        the process)."""
        self.__pipeline_queue = None
        self.__workers = None


class PipelineProcess(multiprocessing.Process):
    """Child process dedicated to running a single pipeline's execute()."""

    def __init__(self, pipeline, *args, **kwargs):
        multiprocessing.Process.__init__(self, name = pipeline.name)
        self.pipeline = pipeline
        self.args = args
        self.kwargs = kwargs

    def run(self):
        # NOTE(review): presumably resets port-group state inherited from
        # the parent process before the pipeline starts — confirm.
        protected_port_group_pool.clean()
        self.pipeline.execute(*self.args, **self.kwargs)


class WorkThread(threading.Thread):
    def __init__(self, pipeline_queue, timeout = 0, lock_timeout = 1, callback = None):
        """Daemon worker that drains the pipeline queue and runs each
        pipeline in its own child process under a file lock.

        pipeline_queue - shared queue of (pipeline, args, kwargs) tuples
        timeout  - default pipeline timeout. Default: 0, means infinity
        lock_timeout - seconds to wait for the pipeline's file lock before
            re-queueing the pipeline
        callback - PipelineCallback notified of failures and messages in
            the CONTEXT OF THIS WORKER THREAD.
            ATTENTION: must be lightweight
        """
        assert pipeline_queue != None, "The 'pipeline' parameters must not be null."
        threading.Thread.__init__(self)

        self.__pipeline_queue = pipeline_queue
        self.__callback = callback
        self.timeout = timeout
        self.lock_timeout = lock_timeout
        self.idle = True  # False while a pipeline is being executed

        # Daemon thread: dies with the interpreter; starts consuming at once.
        self.setDaemon(True)
        self.start()

    def run(self):
        multiply = 0  # NOTE(review): never used
        while True:
            try:
                # Take one pipeline off the queue.
                # NOTE(review): with block=False the timeout argument is ignored.
                pipeline, args, kwargs = self.__pipeline_queue.get(block = False, timeout = 60)
                log.info(u"管道 '%s'已从执行队列中获取出来，即将启用进程来执行." % pipeline.name)
                try:
                    self.idle = False
                    context = kwargs['context']
                    lock = self.__get_lock(context, pipeline)
                    with lock.acquire(timeout = self.lock_timeout):
                        # Run the pipeline in a dedicated child process.
                        pipeline_process = PipelineProcess(pipeline, *args, **kwargs)
                        pipeline_process.name = pipeline.name
                        pipeline_process.daemon = True
                        pipeline_process.start()
                        pipeline.pid = pipeline_process.pid
                        log.info(u"管道 '%s'已开始执行，对应的进程号是'%d'。" % (pipeline.name, pipeline.pid))
                        pipeline_process.join(pipeline.timeout if pipeline.timeout else self.timeout)  # wait for the process to finish (bounded by the timeout)
                        log.info(u"管道 '%s'已执行结束，对应的进程退出码是'%s'。" % (pipeline.name, str(pipeline_process.exitcode)))
                        if pipeline_process.is_alive():  # still alive after join -> timed out: terminate and raise PipelineTimeoutError
                            pipeline_process.terminate()
                            raise PipelineTimeoutError(pipeline)
                        elif pipeline_process.exitcode != 0 and pipeline_process.exitcode != 1:  # killed externally -> PipelineAbortedError; exitcode 0 = clean exit, 1 = exited via an exception
                            raise PipelineAbortedError(pipeline, exitcode = pipeline_process.exitcode)
                except filelock.Timeout:
                    log.info(u"管道 '%s'加锁失败，重新置入执行队列." % pipeline.name)
                    self.__pipeline_queue.put((pipeline, args, kwargs))
                    # Lock acquisition failed: notify listeners so callers can
                    # check the task status later.
                    self.__fire_onmessage(pipeline,"任务'%s'加锁失败,请稍后查看任务状态." % pipeline.name)
                except BaseException, e:
                    log.error(traceback.format_exc())
                    self.__fire_onfailed(pipeline, e.message)
            except Queue.Empty:
                # Queue drained: idle briefly before polling again.
                time.sleep(1)
                self.idle = True
                continue
            except:
                log.error(u'发生未知错误.' + traceback.format_exc())

    def __get_lock(self, context, pipeline):
        """Obtain the pipeline's file lock from the context's lock factory."""
        return context.pipeline_lock_factory.get_lock(pipeline)

    def __lock(self, lock):
        # NOTE(review): unused helper; locking happens via the `with` block above.
        if lock:
            lock.acquire()

    def __unlock(self, lock):
        # NOTE(review): unused helper; unlocking happens via the `with` block above.
        if lock:
            lock.release()

    def __fire_onfailed(self, pipeline, err_msg):
        """Forward a failure to the callback, if one was supplied."""
        if self.__callback:
            self.__callback.on_pipeline_failed(pipeline, err_msg)

    def __fire_onmessage(self, pipeline, msg):
        """Forward an informational message to the callback, if supplied."""
        if self.__callback:
            self.__callback.on_pipeline_msg(pipeline, msg)

class PipelineSeqGenerator(object):
    """Generates zero-padded, monotonically increasing run numbers."""

    ID_LENGTH = 5  # width of the zero-padded identifier

    def __init__(self):
        self.id = 0

    def next_id(self):
        """Return the next identifier: '00001', '00002', ..."""
        self.id += 1
        return str(self.id).zfill(PipelineSeqGenerator.ID_LENGTH)


class PipelineLockFactory(object):
    def __init__(self, lock_file_dir = "lock"):
        self.lock_file_dir = os.path.normpath(lock_file_dir)

    def init(self):
        try:
            shutil.rmtree(self.lock_file_dir)
        except BaseException, e:
            log.warn(u'清理pipeline文件锁目录时发生错误:%s' % e.message)
        try:
            os.makedirs(self.lock_file_dir)
        except BaseException, e:
            log.warn(u'创建pipeline文件锁目录时发生错误:%s' % e.message)

    def get_lock(self, pipeline):
        if pipeline.group:
            lock_file = os.path.join(self.lock_file_dir,
                                     pipeline.group + '.lock')
        else:
            lock_file = os.path.join(self.lock_file_dir,
                                     self.__randam_filename(pipeline.name) + '.lock')
        return filelock.FileLock(lock_file)

    def __randam_filename(self, prefix):
        return prefix + '_' + ''.join(map(lambda xx: (hex(ord(xx))[2:]), os.urandom(8)))


class ContainerAppEvent(PipelineEvent):
    """Event pairing an application with the pipeline that produced it."""

    # NOTE(review): several inconsistencies to confirm with callers:
    #  - the parameter order here is (app, pipeline), but both
    #    on_pipeline_msg call sites in this file invoke
    #    ContainerAppEvent(pipeline, message), which binds the pipeline to
    #    _app and the message string to _pipeline (to_dict would then fail
    #    on message.to_dict()).
    #  - PipelineEvent.__init__ is never called, so self.pipeline is unset.
    #  - PipelineStateManager.notify only dispatches PipelineStatusEvent
    #    and PipelineRegisterEvent instances, so this event is silently
    #    dropped by the notifier.
    def __init__(self, app, pipeline):
        self._app = app
        self._pipeline = pipeline

    def to_dict(self):
        """Serialize the event, embedding both app and pipeline dicts."""
        _dict = {
            'name': 'container_app_event',
            'event': 'container_app_event',
            'pipeline': self._pipeline.to_dict(),
            'app': self._app.to_dict(),
        }

        return _dict

    def __str__(self):
        return (
            "<{class_name}("
            "app=\"{self._app}\","
            "pipeline=\"{self._pipeline}\""
            ")>".format(
                class_name = self.__class__.__name__,
                self = self
            )
        )

