# -*- coding: utf8  -*-

from __future__ import print_function

import argparse
import functools
import json
import multiprocessing
import os.path
import signal
import sys
import time
import traceback
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager

import gevent
import six
from gevent import Greenlet, Timeout, local
from gevent.lock import BoundedSemaphore
try:
    import configparser
except:
    import ConfigParser as configparser

from phoenix_mns.mns_log.log import init_log_from_config, logger as default_logger
from phoenix_mns.client.mns_client import MnsClient


def onsignal_term(arg1, arg2):
    """SIGTERM handler: request a graceful stop of all consumer loops.

    :param arg1: signal number delivered by the interpreter
    :param arg2: current stack frame (unused)
    """
    global STOP_WORKER_FLAG
    STOP_WORKER_FLAG = True
    default_logger.warn("mns client is stopping by signal TERM")


# Set to True by the SIGTERM handler; the consumer loops poll it and exit.
STOP_WORKER_FLAG = False
# Install the graceful-stop handler at import time.
signal.signal(signal.SIGTERM, onsignal_term)

# Section/option names read from the service config (ini) file.
BASE = "Base"
PAUSE_FILE = "PauseFile"
LOG_SWITCH = "LogSwitch"
ENABLE_FLAT_LOG = 'enable_flat_log'
ENABLE_MNS_LOG = 'enable_mns_log'
FUNC_EXEC_TIMEOUT = 'func_exec_timeout'
# (section, option) pairs that must exist in the config; missing ones abort startup.
required_ops = [(BASE, PAUSE_FILE)]


def log_decorator(func):
    """Decorator for consume-style methods: log one statistics line per call.

    The wrapped method may be called either with no extra positional args
    (BasicService.consume, which reads ``inst.consume_msg``) or with
    ``(payload, dequeue_count)`` positionals (gevent variants).  An optional
    ``starttime`` keyword overrides the timing start (used by log_timeout).

    :param func: bound-method-style callable returning True on success
    :return: wrapped callable with identical signature and return value
    """
    @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped method
    def serve(*args, **kwds):
        start = kwds['starttime'] if 'starttime' in kwds else time.time()
        ret = func(*args, **kwds)
        succ = 1 if ret is True else 0
        end = time.time()
        elsc = (end - start) * 1000  # elapsed milliseconds
        inst = args[0]
        # Gevent-style calls pass (self, payload, dequeue_count); otherwise
        # fall back to the message state stored on the instance.
        consume_msg = args[1] if len(args) > 2 else inst.consume_msg
        body_len = len(json.dumps(consume_msg))
        dequeue_count = args[2] if len(args) > 2 else inst.consume_msg_dequeue_count
        if inst.log_switch:
            log_str = inst.format_log_info({
                'succ': succ,
                'msg_size': body_len,
                'cost': elsc,
                'dequeue_count': dequeue_count,
            })
            inst.logger.info(log_str)
        return ret
    return serve


@six.add_metaclass(ABCMeta)
class BasicService(object):
    """Blocking (single-process) MNS consumer skeleton.

    Subclasses register handler method names via :meth:`init_consume_list`;
    :meth:`run` then loops forever: receive a message, JSON-decode it into
    ``consume_msg``, execute every handler in order, and ack (delete) the
    message only when all handlers succeeded.
    """

    def __init__(self, srv_cfg_file, mns_consume_cfg):
        """
        :param srv_cfg_file: path of the service config (ini) file
        :param mns_consume_cfg: config handed straight to MnsClient
        """
        self.srv_cfg_file = srv_cfg_file
        self.mns_consume_cfg = mns_consume_cfg
        self.mns_consume_client = None
        # Result of the most recent consume(); drives the ack in post_consume.
        self.consumer_result = False
        self.init_logger()
        self.init_from_cfg()
        self.consume_list = []
        self.consume_msg = None
        self.statistic_dict = {}
        self.init_consume_list()

    def init_logger(self):
        """Create the logger from config, falling back to the library default.

        Also sets the logging switches to their defaults before the config
        file is read (init_from_cfg may override them).
        """
        self.log_switch = True
        self.enable_flat_log = False
        self.enable_mns_log = False
        try:
            self.logger = init_log_from_config(self.srv_cfg_file)
        except Exception:
            print('custom logger failed, choose default logger')
            # Bug fix: print_exc() prints and returns None, so the original
            # printed a spurious "None"; format_exc() returns the text.
            print(traceback.format_exc())
            self.logger = default_logger

    def format_log_info(self, extr_info=None):
        """Merge *extr_info* into the per-message statistics and render them.

        :param extr_info: extra key/value pairs to record (may be None)
        :return: a flat tab-separated ``key=value`` line when
                 ``enable_flat_log`` is set, otherwise ``statistic=<json>``.
        """
        self.statistic_dict.update(extr_info or {})

        def flat_dict(data, prefix='', sep='\t'):
            # Flatten nested dicts into "parent-child=value" segments.
            segments = []
            for k, v in list(data.items()):
                normalized_key = k.replace('-', '_')
                name = normalized_key if not prefix else '{}-{}'.format(prefix, normalized_key)
                if isinstance(v, dict):
                    segment = flat_dict(v, prefix=name)
                else:
                    segment = '{}={}'.format(name, str(v))

                segments.append(segment)

            return sep.join(segments)

        def make_flat(data):
            if isinstance(data, dict):
                return flat_dict(data)
            else:
                return str(data)

        if self.enable_flat_log:
            return make_flat(self.statistic_dict)
        else:
            return "statistic=" + json.dumps(self.statistic_dict)

    @abstractmethod
    def init_consume_list(self):
        """
        should init consume_list which will be exec when consume.
        init consume_list like
        self.consume_list = [
            'insert_db',
            'insert_redis'
        ]
        insert_db and insert_redis is the method name in the instance
        :return:
        """
        pass

    def reset(self):
        """Clear per-message state; executed before each receive.

        ``consumer_result`` is reset as well, so a stale True left over from
        a previous message can never make post_consume ack (del_msg) a
        message that was not actually consumed in this iteration.
        :return:
        """
        self.consume_msg = None
        self.statistic_dict = {}
        self.consumer_result = False

    def __init_config_parser(self):
        """Load the ini file and abort the process if a required option is missing.

        :return:
        """
        self.parser = configparser.ConfigParser()
        self.parser.read(self.srv_cfg_file)
        for sec, op in required_ops:
            if not self.parser.has_option(sec, op):
                self.logger.error("ERROR: need (%s, %s) in %s.\n" % (sec, op, self.srv_cfg_file))
                sys.exit(1)

    def init_from_cfg(self):
        """Read optional switches from the config file and build the MNS client."""
        self.__init_config_parser()
        self.pause_file = self.parser.get(BASE, PAUSE_FILE)
        loading_list = [('log_switch', LOG_SWITCH), ('enable_flat_log', ENABLE_FLAT_LOG), ('enable_mns_log', ENABLE_MNS_LOG)]
        for key, cfg in loading_list:
            try:
                value = self.parser.getboolean(BASE, cfg)
                setattr(self, key, value)
            except (configparser.Error, ValueError):
                # Option absent or malformed: keep the default from init_logger.
                pass

        try:
            mylogger = self.logger if self.enable_mns_log else None
            self.mns_consume_client = MnsClient(self.mns_consume_cfg, logger=mylogger)
            # NOTE: handler execution time must stay below the MNS message
            # visibility timeout, otherwise several consumers may receive the
            # same message - this has caused consumers to hang twice before.
        except Exception as e:
            self.logger.error("load mns client failed due to %s" % e)
            sys.exit(1)

    @log_decorator
    def consume(self):
        """Run every handler named in consume_list against consume_msg.

        Never use sys.exit inside a handler!
        :return: True only if every handler exists, raises nothing and does
                 not return False; otherwise False (with the failure reason
                 recorded in statistic_dict['consume_list']).
        """
        self.statistic_dict['consume_list'] = {}
        for each_consume in self.consume_list:
            try:
                ins_method = getattr(self, each_consume)
            except Exception:
                self.logger.error("no %s found in consume list due to %s" % (each_consume, traceback.format_exc()))
                self.statistic_dict['consume_list'][each_consume] = 'not_found'
                return False
            else:
                try:
                    consume_ret = ins_method()
                except Exception:
                    self.logger.error("exec %s method in consume list failed due to %s" % (each_consume, traceback.format_exc()))
                    self.statistic_dict['consume_list'][each_consume] = 'exception'
                    return False
                else:
                    if consume_ret is False:
                        self.statistic_dict['consume_list'][each_consume] = 'false'
                        return False
                self.statistic_dict['consume_list'][each_consume] = 'true'
        return True

    def if_pause(self):
        """Return True when the pause file exists (operator-driven pause).

        :return:
        """
        return os.path.exists(self.pause_file)

    def deserial(self, msg_body, msg_dequeue_count):
        """Deserialize the raw message body (JSON only) into consume_msg.

        :param msg_body: raw string received from MNS
        :param msg_dequeue_count: number of times this message was dequeued
        :return: True on success, False on a decode error (logged).
        """
        try:
            self.consume_msg = json.loads(msg_body)
            self.consume_msg_dequeue_count = msg_dequeue_count
        except Exception:
            self.logger.error("json decode failed due to %s" % traceback.format_exc())
            return False
        else:
            return True

    def pre_consume(self):
        """Reset per-message state, receive one message and deserialize it.

        :return: True if a message was received and decoded, else False.
        """
        self.reset()
        msg_body, msg_dequeue_count = self.mns_consume_client.recv_msg()
        if msg_body is None:
            return False
        return self.deserial(msg_body, msg_dequeue_count)

    def post_consume(self):
        """Ack (delete) the current message only when consume() succeeded.

        :return:
        """
        if self.consumer_result is True:
            self.mns_consume_client.del_msg()

    @contextmanager
    def consume_wrapper(self):
        """Context manager bracketing one message: pre_consume / post_consume.

        Yields the boolean result of pre_consume.
        """
        msg_body = self.pre_consume()
        yield msg_body
        self.post_consume()

    def run(self):
        """Consume messages forever, honouring the pause file and SIGTERM flag."""
        while True:
            if self.if_pause():
                time.sleep(5)
                continue
            if STOP_WORKER_FLAG:
                self.logger.warn("mns client stop by signal TERM")
                break
            with self.consume_wrapper() as msg_body:
                if msg_body is False:
                    # No message (or decode failure): post_consume still runs
                    # but consumer_result was reset, so nothing is acked.
                    continue
                self.consumer_result = self.consume()


class MultiProcessService(multiprocessing.Process, BasicService):
    """Process-based consumer: runs the BasicService loop in a child process."""

    def __init__(self, srv_cfg_file, mns_consume_cfg):
        # Initialize each base explicitly (no cooperative super chain here):
        # the Process machinery first, then the consumer-side state/config.
        multiprocessing.Process.__init__(self)
        BasicService.__init__(self, srv_cfg_file, mns_consume_cfg)

    def run(self):
        # Entry point executed in the child process; delegate to the
        # blocking consume loop.
        BasicService.run(self)


@six.add_metaclass(ABCMeta)
class BasicGeventService(object):
    """Gevent-flavoured consumer skeleton.

    Like BasicService, but per-message statistics live in greenlet-local
    storage (``glocal``) so concurrent greenlets do not clobber each other,
    and handlers receive ``(payload, dequeue_count)`` explicitly.
    """
    # Greenlet-local storage for per-message statistics.
    glocal = local.local()
    # Serializes the timeout-cleanup path in GeventService._run.
    semaphore = BoundedSemaphore(1)

    def __init__(self, srv_cfg_file, mns_consume_cfg):
        """
        :param srv_cfg_file: path of the service config (ini) file
        :param mns_consume_cfg: config handed straight to MnsClient
        """
        self.srv_cfg_file = srv_cfg_file
        self.mns_consume_cfg = mns_consume_cfg
        self.mns_consume_client = None
        self.init_logger()
        # Per-message execution deadline in seconds; None disables the timer.
        self.func_exec_timeout = None
        self.init_from_cfg()
        self.consume_list = []
        self.init_consume_list()

    def init_logger(self):
        """Create the logger from config, falling back to the library default.

        Also sets the logging switches to their defaults before the config
        file is read (init_from_cfg may override them).
        """
        self.log_switch = True
        self.enable_flat_log = False
        self.enable_mns_log = False
        try:
            self.logger = init_log_from_config(self.srv_cfg_file)
        except Exception:
            print('custom logger failed, choose default logger')
            # Bug fix: print_exc() prints and returns None, so the original
            # printed a spurious "None"; format_exc() returns the text.
            print(traceback.format_exc())
            self.logger = default_logger

    def format_log_info(self, extra_info=None):
        """Merge *extra_info* into this greenlet's statistics and render them.

        :param extra_info: extra key/value pairs to record (may be None)
        :return: a flat tab-separated ``key=value`` line when
                 ``enable_flat_log`` is set, otherwise ``statistic=<json>``.
        """
        self.glocal.statistic_dict.update(extra_info or {})

        def flat_dict(data, prefix='', sep='\t'):
            # Flatten nested dicts into "parent-child=value" segments.
            segments = []
            for k, v in list(data.items()):
                normalized_key = k.replace('-', '_')
                name = normalized_key if not prefix else '{}-{}'.format(prefix, normalized_key)
                if isinstance(v, dict):
                    segment = flat_dict(v, prefix=name)
                else:
                    segment = '{}={}'.format(name, str(v))

                segments.append(segment)

            return sep.join(segments)

        def make_flat(data):
            if isinstance(data, dict):
                return flat_dict(data)
            else:
                return str(data)

        if self.enable_flat_log:
            return make_flat(self.glocal.statistic_dict)
        else:
            return "statistic=" + json.dumps(self.glocal.statistic_dict)

    @abstractmethod
    def init_consume_list(self):
        """
        should init consume_list which will be exec when consume.
        init consume_list like
        self.consume_list = [
            'insert_db',
            'insert_redis'
        ]
        insert_db and insert_redis is the method name in the instance
        :return:
        """
        pass

    def __init_config_parser(self):
        """Load the ini file and abort the process if a required option is missing.

        :return:
        """
        self.parser = configparser.ConfigParser()
        self.parser.read(self.srv_cfg_file)
        for sec, op in required_ops:
            if not self.parser.has_option(sec, op):
                self.logger.error("ERROR: need (%s, %s) in %s.\n" % (sec, op, self.srv_cfg_file))
                sys.exit(1)

    def init_from_cfg(self):
        """Read optional switches from the config file and build the MNS client."""
        self.__init_config_parser()
        self.pause_file = self.parser.get(BASE, PAUSE_FILE)
        loading_list = [
            ('log_switch', LOG_SWITCH, 'getboolean'),
            ('enable_flat_log', ENABLE_FLAT_LOG, 'getboolean'),
            ('enable_mns_log', ENABLE_MNS_LOG, 'getboolean'),
            ('func_exec_timeout', FUNC_EXEC_TIMEOUT, 'getfloat'),
        ]
        for key, cfg, method in loading_list:
            try:
                value = getattr(self.parser, method)(BASE, cfg)
                setattr(self, key, value)
            except (configparser.Error, ValueError):
                # Option absent or malformed: keep the attribute's default.
                pass

        try:
            mylogger = self.logger if self.enable_mns_log else None
            self.mns_consume_client = MnsClient(self.mns_consume_cfg, logger=mylogger)
        except Exception as e:
            self.logger.error("load mns client failed due to %s" % e)
            sys.exit(1)

    @log_decorator
    def consume(self, payload, dequeue_count):
        """Run every handler named in consume_list against *payload*.

        Never use sys.exit inside a handler!
        :param payload: decoded message body received from MNS
        :param dequeue_count: number of times this message was dequeued
        :return: True only if every handler exists, raises nothing and does
                 not return False; otherwise False (with the failure reason
                 recorded in the greenlet-local statistics).
        """
        self.glocal.statistic_dict = {'gevent': id(gevent.getcurrent())}
        self.glocal.statistic_dict['consume_list'] = {}
        for each_consume in self.consume_list:
            try:
                ins_method = getattr(self, each_consume)
            except Exception:
                self.logger.error("no %s found in consume list due to %s" % (each_consume, traceback.format_exc()))
                self.glocal.statistic_dict['consume_list'][each_consume] = 'not_found'
                return False
            else:
                try:
                    consume_ret = ins_method(payload, dequeue_count)
                except Exception:
                    self.logger.error("exec %s method in consume list failed due to %s" % (each_consume, traceback.format_exc()))
                    self.glocal.statistic_dict['consume_list'][each_consume] = 'exception'
                    return False
                else:
                    if consume_ret is False:
                        self.glocal.statistic_dict['consume_list'][each_consume] = 'false'
                        return False
                self.glocal.statistic_dict['consume_list'][each_consume] = 'true'
        return True

    def if_pause(self):
        """Return True when the pause file exists (operator-driven pause).

        :return:
        """
        return os.path.exists(self.pause_file)

    @abstractmethod
    def run(self):
        pass


class TooLong(Exception):
    """Raised by the gevent Timeout when a consume call runs past its deadline."""


class GeventService(Greenlet, BasicGeventService):
    """Greenlet-based consumer.

    Each received message is handled in its own spawned greenlet; when
    ``func_exec_timeout`` is configured, a gevent Timeout bounds the wait
    and timed-out messages are deleted and logged as failures.
    """

    def __init__(self, srv_cfg_file, mns_consume_cfg):
        # Initialize each base explicitly: Greenlet machinery first, then
        # consumer state/config from BasicGeventService.
        Greenlet.__init__(self)
        BasicGeventService.__init__(self, srv_cfg_file, mns_consume_cfg)

    @log_decorator
    def log_timeout(self, payload, msg_dequeue_count, starttime=None):
        # Always returns False so log_decorator records succ=0 for the
        # timed-out message; `starttime` lets the decorator report the real
        # elapsed time instead of ~0.
        return False

    # pylint: disable=E0202
    def _run(self):
        """Main loop: receive, spawn a handler greenlet, ack or time out.

        Honours the pause file and the SIGTERM stop flag like the other
        service variants.
        """
        while True:
            if self.if_pause():
                gevent.sleep(5)
                continue
            if STOP_WORKER_FLAG:
                self.logger.warn("mns client stop by signal TERM")
                break
            msg_body, msg_dequeue_count, msg_handle = self.mns_consume_client.recv_msg_with_handle()
            if msg_body is None:
                continue
            starttime = time.time()
            # NOTE(review): assumes msg_body is valid JSON — a decode error
            # here propagates out of _run (unlike BasicService.deserial,
            # which catches it); confirm upstream guarantees.
            payload = json.loads(msg_body)
            if self.func_exec_timeout:
                # Arm a timer that raises TooLong in THIS greenlet on expiry.
                timer = Timeout(float(self.func_exec_timeout), TooLong)
                timer.start()
                self.logger.debug('new timer=%s for msg handler=%s' % (self.func_exec_timeout, msg_handle))
            else:
                timer = None
            try:
                # Run the handlers in a child greenlet; join is interrupted
                # by the timer above (join reuses the already-started timer).
                g = gevent.spawn(self.consume, payload, msg_dequeue_count)
                g.join(timeout=timer)
            except TooLong:
                # Timed out: delete the message so it is not redelivered,
                # log the failure, then kill the still-running handler.
                with self.semaphore:
                    self.logger.debug('timeout error, delete msg handler=%s' % msg_handle)
                    self.mns_consume_client.del_msg_by_handle(msg_handle)
                    # this is parent gevent frame, any problem ?
                    self.glocal.statistic_dict = {'timeout': 'true', 'gevent': id(gevent.getcurrent())}
                    self.log_timeout(payload, msg_dequeue_count, starttime=starttime)
                    g.kill()
            else:
                # Only ack when the handler greenlet returned a truthy result.
                if g.value:
                    self.logger.debug('consume success, delete msg handler=%s' % msg_handle)
                    self.mns_consume_client.del_msg_by_handle(msg_handle)
            finally:
                # Always disarm the timer so it cannot fire on a later message.
                if isinstance(timer, Timeout):
                    timer.cancel()
    # pylint: enable=E0202


def get_workers_num():
    """Return the worker count from the ``-w`` CLI flag.

    Defaults to the machine's CPU count when the flag is absent.

    :return: number of workers as an int
    """
    default_workers = multiprocessing.cpu_count()
    arg_parser = argparse.ArgumentParser(description='worker number')
    arg_parser.add_argument('-w', type=int, default=default_workers)
    return arg_parser.parse_args().w
