#!/usr/bin/env python

import os
import re
import sys
import signal
import MySQLdb
import logging
import functools
import traceback
from threading import Thread
from threading import RLock
from threading import Event
from redis import Connection
from random import randint
from Queue import Queue
from time import sleep, strftime, time
from DBUtils.PooledDB import PooledDB
# import setproctitle


# MySQL config (used by init_main to build the shared db facade)
MySQL_host = 'localhost'
MySQL_user = 'root'
MySQL_password = '123456'
MySQL_db = 'finy1'

# host status values (host table `status` column)
HOST_NORMAL = 0
HOST_DISABLE = 1
HOST_TEMPLATE = 2

# item status values (item table `status` column)
ITEM_NORMAL = 0
ITEM_DISABLE = 1
ITEM_TEMPLATE = 2

# item value type: selects which history table a sample is written to
ITEM_VALUE_NUMERIC = 0
ITEM_VALUE_STRING = 1

# history table name per value type (see util.history)
HISTORY_NUMERIC = 'history'
HISTORY_STRING = 'history_str'


# fallback file: failed INSERT statements are dumped here on shutdown
sqlfile = os.path.join(os.path.dirname(__file__), "sql_execute_fail.sql")

# log level (currently DEBUG)
logLevel = logging.DEBUG

# logging setting

# log file path
logfile = '/root/redis_collect.log'

# logging formatter
logformat = logging.Formatter("%(asctime)s - %(levelname)s - %(name)s - " +
                              "%(message)s")
# logging handler (plain FileHandler; no rotation)
logging_hendler = logging.FileHandler(logfile)
logging_hendler.setFormatter(logformat)

# module name without the .py extension, used as the root logger name
filename = os.path.split(__file__)[1]
filename = filename[:-3] if filename[-3:] == ".py" else filename

# main log logger; every worker derives child loggers from it
logger = logging.getLogger(filename)
logger.addHandler(logging_hendler)
logger.setLevel(logLevel)


class Collect_Error(Exception):
    """Raised when a collect worker fails to process an items batch."""


class util(object):
    @staticmethod
    def exception(func):
        log = logger.getChild("exception")

        def warpper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except:
                log.error("function: %s runtime exception", exc_info=True)
                return False
        warpper.__doc__ = func.__doc__
        warpper.__name__ = func.__name__
        return warpper

    @staticmethod
    def Conversion_Type(data, type_func):
        if isinstance(data, type_func):
            return data
        return type_func(data)

    @staticmethod
    def kwargs_default(k, key, default):
        return k[key] if key in ' '.join(k.keys()) else default

    def Disable_Item(func):
        @functools.wraps(func)
        def wraps(*args, **kwargs):
            try:
                func(*args, **kwargs)
            except:
                print traceback.print_exc()
                return (args[0][0], None, False)

    @staticmethod
    def PRunTime(func):
        def wraps(*args, **kwargs):
            s = time()
            func(*args, **kwargs)
            print func.func_name, "Run time: %0.3f" % (time() - s)
        return wraps

    @staticmethod
    def history(val_type):
        if val_type == ITEM_VALUE_NUMERIC:
            return HISTORY_NUMERIC
        else:
            return HISTORY_STRING

    @staticmethod
    def info_get(info, key, hostid):
        '''info: redis info
           key: key_
           hostid: hostid
           get redis info() value, and get child value
           return  redis_info_value
        '''
        if '.' in key:
            dbnum = 0
            key_array = key.format(dbnum).split('.')
            value = info
            for k in key_array:
                value = value.get(k)
                if not value:
                    break
            return value
        else:
            return info.get(key)

    @staticmethod
    def value_get(info, *args):
        '''info: is redis info()
          args[1]: itemid
          args[2]: key_
          args[3]: hostid
          args[4]: value_type
          return (itemid, info_value, value_type, status(bool))
        '''
        if not args or len(args) != 4:
            return
        itemid, key_, hostid, var_type = args
        if isinstance(key_, bytearray):
            key_ = str(key_)
        redis_info_value = util.info_get(info, key_, hostid)
        return (itemid, redis_info_value, var_type,
                True if redis_info_value else False)


class base(object):
    """Common plumbing for the worker threads: a timestamped per-key
    cache, a "queue cache" used to buffer work while MySQL is down,
    and error counting / back-off helpers."""

    def __init__(self):
        self.name = 'thread'
        self.logger = None  # set by init_log() once self.name is final
        self.cache = {}
        # cache entry format: {key: {'value': value, 'date': time()}}
        self.cache_timeout = 30  # cache entry lifetime (seconds)
        self.queue_cache = None  # buffered data while the DB is down
        self.queue_cache_status = 0   # 0: queue cache not in use
        self.queue_cache_type = None  # "empty" template used on reset
        self.last_error_time = 0     # 0: no error recorded, else epoch secs
        self.last_error_num = 1      # 0: retry the callback, else keep waiting
        self.last_error_expired = 7200  # reset last error time after 2 hours
        self.default_sleep_sec = 300
        self.error_max = 100
        self.error_num = 0
        self.error_gt_num_sleep_num = 10  # errors tolerated before back-off
        # escalating back-off table: 0, 300, 600, ... seconds
        self.error_gt_num_sleep_time = [self.default_sleep_sec * num for num
                                        in range(20)]
        self.error_gt_num_now = 0  # current index into the back-off table

    def init_log(self):
        """Create this worker's child logger under the module logger."""
        self.logger = logger.getChild(self.name)

    def cache_add(self, key, value):
        """Store *value* under *key* with the current timestamp."""
        self.cache[key] = {'value': value, 'date': time()}

    def cache_del(self, key):
        """Drop *key* from the cache; True when it was present."""
        if key in self.cache:
            del self.cache[key]
            return True
        return False

    def cache_get(self, key):
        """Return the {'value', 'date'} envelope for *key*, or None."""
        return self.cache.get(key)

    def queue_cache_init(self, ntype=None):
        """Remember the queue cache's "empty" template and start from a
        copy of it.

        BUG FIX: the original assigned the template object itself, so
        appends mutated the template and a later reset restored the
        already-filled list instead of an empty one.
        """
        self.queue_cache_type = ntype
        self.queue_cache = None if ntype is None else type(ntype)(ntype)

    def queue_cache_get_status(self):
        """True when the queue cache currently holds buffered work."""
        return bool(self.queue_cache_status)

    def queue_cache_set(self, val):
        self.queue_cache = val

    def queue_cache_add(self, val):
        self.queue_cache.append(val)

    def queue_cache_del(self):
        """Reset the queue cache to a fresh copy of the empty template
        (a copy, for the same aliasing reason as queue_cache_init)."""
        t = self.queue_cache_type
        self.queue_cache = None if t is None else type(t)(t)

    def queue_cache_get(self):
        return self.queue_cache

    def stop(self):
        """Overridden by thread subclasses to request shutdown."""
        pass

    def event_hit(self):
        """Overridden by thread subclasses to honour a stop request."""
        pass

    def get_db(self, con_object):
        """Build a db facade borrowing a connection from *con_object*'s
        pool (same config, shared pool)."""
        con = con_object.pool.connection()
        return db(con=con, **con_object.config)

    def error_wait(self):
        """Escalating back-off once error_num crosses the threshold;
        stops the worker when the back-off table is exhausted.

        BUG FIX: the original bounds check (len < index) allowed an
        IndexError at index == len, and its log.error applied '%' only
        to the second string literal, raising TypeError while logging.
        """
        log = self.logger.getChild(self.error_wait.__name__)
        if self.error_num < self.error_gt_num_sleep_num:
            return
        if self.error_gt_num_now >= len(self.error_gt_num_sleep_time):
            log.error('%s error_num: %d > error_max: %d, thread '
                      'exiting...........' % (self.name, self.error_num,
                                              self.error_max))
            self.stop()
            return
        wait_sec = self.error_gt_num_sleep_time[self.error_gt_num_now]
        self.sleep(wait_sec)
        log.info('%s error_num: %d, sleep %s sec, waiting...' % (
            self.name, self.error_num, wait_sec))
        self.error_gt_num_now += 1

    def sleep(self, time):
        """Sleep *time* seconds in ~1s slices, checking the stop event
        and error back-off between slices.

        NOTE: the parameter deliberately keeps its historical name even
        though it shadows the imported time(); only sleep() is used in
        this method. Assumes *time* is a positive number; a non-integer
        value > 1 would never satisfy the modulo check below.
        """
        sleep_delay = 0.99998  # slightly under 1s to offset loop overhead
        if time <= 1:
            sleep(time)
            self.event_hit()
            self.error_wait()
            return
        now_sec = 0
        while True:
            sleep(sleep_delay)
            self.event_hit()
            self.error_wait()
            now_sec += 1
            if (now_sec % time) == 0:
                break

    def time_callback(self, sec, callback, *args, **kwargs):
        """Run *callback* at most once every *sec* seconds after an
        error was recorded (see error_time_record), or immediately once
        when a retry was requested (last_error_num == 0).

        Returns the callback result, or False when it was not run or
        failed. Uses __name__ (portable) instead of py2-only func_name.
        """
        log = self.logger.getChild(self.time_callback.__name__)
        if self.last_error_time != 0:
            elapsed = int(time()) - self.last_error_time
            if elapsed < sec:
                return False
            log.info("now time > %d call %s func" % (sec, callback.__name__))
            self.last_error_time = 0   # reset to the "no error" state
            result = self.callback(callback, *args, **kwargs)
            if result:
                log.info("%s func call success, %s " %
                         (callback.__name__, self.name))
            else:
                log.info("%s func call fail, %s" %
                         (callback.__name__, self.name))
            return result
        if self.last_error_num == 0:
            self.last_error_num = 1
            result = self.callback(callback, *args, **kwargs)
            if result:
                log.info("%s func call success, %s, retry call " %
                         (callback.__name__, self.name))
            else:
                log.info("%s func call failed, %s, retry call " %
                         (callback.__name__, self.name))
            return result
        return False

    def callback(self, func, *args, **kwargs):
        """Invoke *func*, logging any exception and returning False.

        except Exception (not bare except) so SystemExit raised by
        event_hit() can still unwind and stop the thread.
        """
        log = self.logger.getChild(self.callback.__name__)
        try:
            return func(*args, **kwargs)
        except Exception:
            log.error("%s call func: %s failed " %
                      (self.name, func.__name__), exc_info=True)
            return False

    def error_time_record(self):
        """Record the first error's timestamp and request a retry; if
        the recorded error is older than last_error_expired, restart
        the window from now."""
        log = self.logger.getChild(self.error_time_record.__name__)
        ntime = int(time())
        if self.last_error_time == 0:
            self.last_error_time = ntime
            self.last_error_num = 0
            log.info("error time is record: %d" % self.last_error_time)
        elif (ntime - self.last_error_time) > self.last_error_expired:
            log.info("last error time > %d" % self.last_error_expired +
                     " ,last_error_time: %d " % self.last_error_time +
                     "new last_error_time: %d reset for %s" % (ntime,
                                                               self.name))
            self.last_error_time = ntime


class base_thread(Thread, base):
    """Thread flavour of `base`: adds an Event used for cooperative
    shutdown via stop() / event_hit()."""

    def __init__(self):
        Thread.__init__(self)
        base.__init__(self)
        self.event = Event()
        # self.setDaemon(True)

    def stop(self):
        """Ask the run loop to exit at its next event_hit() check."""
        self.event.set()
        self.logger.getChild(self.stop.__name__).info(
            "event is set, wait exit")

    def event_hit(self):
        """Raise SystemExit inside the thread once stop() was called."""
        if not self.is_stop():
            return
        self.logger.getChild(self.event_hit.__name__).info('thread exit')
        raise SystemExit

    def is_stop(self):
        """True once stop() has been requested."""
        return self.event.is_set()

    def init(self):
        """Stagger thread start-up with a random 1-10s pause."""
        self.sleep(randint(1, 10))
        self.logger.getChild(self.init.__name__).info('thread init done')


class delay_queue(base_thread):
    """Producer thread, one per distinct item delay: every *sec*
    seconds it loads the matching items from MySQL and feeds them to
    the collect workers in batches through the shared items queue.
    While MySQL is down it replays the last successful item list from
    the queue cache."""

    count = 0  # class-wide counter: thread naming and batch numbering

    def __init__(self, queue, sec, pool):
        base_thread.__init__(self)
        delay_queue.count += 1
        self.queue = queue  # shared items queue consumed by collect threads
        self.sec = int(sec)
        self.db = self.get_db(pool)
        self.setName(('delay_queue:%ds_' % self.sec) + str(delay_queue.count))
        self.now_sec = 0
        self.init_log()

    def get_num(self):
        """Take the next global batch number (bumps the shared counter)."""
        num = delay_queue.count
        delay_queue.count += 1
        return num

    def put(self, items):
        """Push one numbered batch of items onto the shared queue."""
        log = self.logger.getChild(self.put.__name__)
        num = self.get_num()
        self.queue.put((num, items))
        log.debug("items number: %d is put, items: %s" % (num, items))

    def qcache(self, items):
        """Remember a successful MySQL result in the queue cache, or
        fall back to the cached copy when *items* is not a result."""
        log = self.logger.getChild(self.qcache.__name__)
        if isinstance(items, tuple) and items:
            self.queue_cache_set(items)
            log.debug("get %ds items for mysql" % self.sec)
            return items
        self.queue_cache_status = 1
        log.debug("get %ds items for cache" % self.sec)
        return self.queue_cache_get()

    def cache_apply(self):
        """Return the cached item list, or stop the thread when neither
        MySQL nor the cache ever produced data."""
        log = self.logger.getChild(self.cache_apply.__name__)
        if self.queue_cache_get_status():
            return self.queue_cache_get()
        # BUG FIX: the original message left its %ds placeholder unfilled
        log.error("not get %ds items data, for mysql and cache, exiting"
                  % self.sec)
        self.stop()

    def sleep(self, sec):
        """Sleep; opportunistically retry the MySQL connection at most
        every 600s after a recorded failure."""
        self.time_callback(600, self.db.connect)
        super(delay_queue, self).sleep(sec)

    def split(self, items, max_num=20):
        """Split *items* into chunks of at most *max_num* entries.

        BUG FIX: the original used Python-2-only integer '/' and let
        the final chunk grow up to 2*max_num-1 entries.
        """
        length = len(items)
        if length <= max_num:
            return [items]
        return [items[i:i + max_num] for i in range(0, length, max_num)]

    def push_queue(self):
        """One production round: fetch (or recover) the item list and
        enqueue it in batches. Explicit loop instead of map(): map for
        side effects is a no-op on Python 3 (lazy iterator)."""
        if self.db.ping():
            data = self.qcache(self.db.get_items(delay=self.sec))
        else:
            self.error_time_record()
            data = self.cache_apply()
        if not data:
            # nothing fetched and nothing cached (cache_apply may have
            # stopped the thread): skip this round
            return
        for batch in self.split(data):
            self.put(batch)

    def run(self):
        self.init()
        while True:
            self.push_queue()
            self.sleep(self.sec)


class redis_management(base):
    """Owns the redis connections and a short-lived cache of parsed
    INFO output, shared by all collect threads. A simple timestamped
    flag (not a real mutex) serialises access to the raw sockets."""

    def __init__(self):
        base.__init__(self)
        self.name = "redis_management"
        self.redis = {}  # hostid (str) -> redis Connection
        self.cache_db_result = {}  # hostid (str) -> connection kwargs
        self.info_cache = {}  # reserved; the envelope cache in `base` is used
        self.cache_timeout = 10  # parsed INFO is reused for 10 seconds
        self.lock = False
        self.lock_time = 0
        self.lock_time_out = 10  # a stale lock is ignored after 10 seconds
        self.init_log()

    def lock_release(self):
        log = self.logger.getChild(self.lock_release.__name__)
        self.lock = False
        log.debug("lock is release for %d" % self.lock_time)

    def lock_acquire(self):
        # NOTE(review): this flag is not atomic -- two threads may pass
        # lock_wait() simultaneously; the 10s timeout bounds the damage
        log = self.logger.getChild(self.lock_acquire.__name__)
        self.lock = True
        self.lock_time = time()
        log.debug("lock acquire lock for %d" % self.lock_time)

    def lock_out(self):
        """True when the lock is free or its holder has timed out."""
        if self.lock and (time() - self.lock_time) < self.lock_time_out:
            return False
        return True

    def lock_wait(self):
        """Poll until the lock is free (or times out)."""
        log = self.logger.getChild(self.lock_wait.__name__)
        if self.lock_out():
            return
        log.debug("wait release lock for %d" % self.lock_time)
        while True:
            sleep(0.1)
            if self.lock_out():
                log.debug("lock is release for %d " % self.lock_time)
                break

    def info(self, hostid):
        """Return the (possibly cached) parsed INFO dict for *hostid*."""
        hostid = util.Conversion_Type(hostid, str)
        return self.info_cache_get(hostid)

    def mkcache(self, hostid, over=False):
        """Fetch + parse INFO for *hostid* and (re)fill its cache
        entry; with over=True an existing entry is refreshed."""
        log = self.logger.getChild(self.mkcache.__name__)
        if over or hostid not in self.cache:
            redis_con = self.get_redis_by_hostid(hostid)
            info = self.parse_info(self.get_info(redis_con))
            self.cache_add(hostid, info)
            log.debug("%s host get data :%s ..." %
                      (hostid, str(self.cache_get(hostid))[:100]))
        return self.cache_get(hostid).get('value')

    def info_cache_get(self, hostid):
        """Cache-aware INFO lookup: (re)build the entry when missing or
        older than cache_timeout; returns None on any failure."""
        log = self.logger.getChild(self.info_cache_get.__name__)
        hostid = util.Conversion_Type(hostid, str)
        self.lock_wait()
        ntime = time()
        cache_dict = self.cache_get(hostid)
        try:
            if not cache_dict:
                log.debug("hostid: %s get redis info not " % hostid +
                          "cache,  set cache")
                return self.mkcache(hostid)
            if (ntime - cache_dict.get("date")) >= self.cache_timeout:
                log.debug("hostid: %s get redis info for" % hostid +
                          " cache, but cache failured")
                return self.mkcache(hostid, over=True)
            data = self.cache_get(hostid).get('value')
            log.debug("hostid: %s redis info cache hit, NOT INFO: %s" %
                      (hostid, not data))
            return data
        except Exception:
            log.error("redis info get exception", exc_info=True)
            return None

    def get_info(self, connection):
        """Issue a raw INFO command; returns the raw response text or
        None on any failure. The flag lock is always released."""
        log = self.logger.getChild(self.get_info.__name__)
        self.lock_wait()
        self.lock_acquire()
        try:
            connection.send_command("info")
            return connection.read_response()
        except Exception:
            log.error("%s redis get info exception" % str(connection),
                      exc_info=True)
            return None
        finally:
            # single release point (the original duplicated it per branch)
            self.lock_release()

    def parse_sub_info(self, info, key="="):
        """Recursively parse 'k=v,k2=v2' sub-fields of an INFO value
        into nested dicts; plain values are returned unchanged."""
        if key not in info:
            return info
        new = {}
        data = info.replace(",", " ") if "," in info else info
        for pair in data.split():
            # skip tokens without '=' and split on the first '=' only,
            # so odd values no longer raise ValueError (BUG FIX)
            if "=" not in pair:
                continue
            k, v = pair.split("=", 1)
            new[k] = self.parse_sub_info(v)
        return new

    def parse_info(self, info):
        """Parse raw INFO text ('key:value\\r\\n' lines) into a dict;
        returns None for empty input."""
        if not info:
            return None
        result = {}
        for k, v in re.findall(r'(.+):(.+)\r', info):
            result[k] = self.parse_sub_info(v)
        return result

    def connection(self, **kwargs):
        """Open a raw redis Connection; returns False on failure."""
        log = self.logger.getChild(self.connection.__name__)
        try:
            connection = Connection(**kwargs)
            log.info("%s redis connection is successful" % str(kwargs['host']))
            return connection
        except Exception:
            log.error("redis connection is exception, data: %s " % str(kwargs),
                      exc_info=True)
            return False

    def get_redis_by_hostid(self, hostid):
        """Return (connecting on demand) the Connection for *hostid*."""
        log = self.logger.getChild(self.get_redis_by_hostid.__name__)
        if hostid in self.redis:
            log.debug("redis object get scuess, hostid: %s" % hostid)
            return self.redis.get(hostid)
        log.debug("reids object don't get redis, connect redis and re"
                  "turn hostid: %s" % hostid)
        host = self.cache_db_result.get(hostid)
        if not host:
            log.error("not get host data: %s " % str(host))
            return
        self.redis[hostid] = self.connection(**host)
        return self.redis.get(hostid)


class redis_agent(base_thread):
    """Maintenance thread: loads host configs from MySQL, opens the
    redis connections, and periodically re-pings/reconnects them."""

    def __init__(self, redis, pool):
        base_thread.__init__(self)
        self.name = "redis_agent"
        self.db = self.get_db(pool)
        self.redis = redis  # shared redis_management instance
        self.cache_timeout = 3600  # host configs are re-read hourly
        self.init_log()

    def init(self):
        '''Load every normal host from MySQL into the local cache and
        publish the plain {hostid: config} mapping to redis_management.'''
        hosts = self.db.get_hosts()
        if not hosts:
            return
        # BUG FIX: get_hosts() returns {hostid: config}; the original
        # iterated the keys and indexed into the int hostids
        for hostid, config in hosts.items():
            self.cache_add(str(hostid), config)
        # BUG FIX: publish the raw configs, not the {'value','date'}
        # cache envelopes, which get_redis_by_hostid cannot splat into
        # Connection(**host)
        self.redis.cache_db_result = dict(
            (k, v['value']) for k, v in self.cache.items())

    def get_host_set_cache(self, hostid):
        """Re-read one host's config from MySQL into the cache; returns
        the config dict or None."""
        log = self.logger.getChild(self.get_host_set_cache.__name__)
        host = self.db.get_host(hostid)
        if not host:
            log.error("mysql queue hostid: %s not get" % hostid)
            return
        # BUG FIX: get_host() returns {hostid: config}, not a row tuple
        for hid, config in host.items():
            self.cache_add(str(hid), config)
        entry = self.cache_get(str(hostid))
        return entry.get('value') if entry else None

    def get_host_cache(self, hostid):
        """Return the cached config for *hostid*, refreshing it from
        MySQL when missing or older than cache_timeout."""
        hostid = str(hostid)
        entry = self.cache_get(hostid)
        if entry is None:
            return self.get_host_set_cache(hostid)
        if (time() - entry.get('date')) >= self.cache_timeout:
            return self.get_host_set_cache(hostid)
        return entry.get("value")

    def connect_redis(self):
        """Open a redis connection for every cached host."""
        log = self.logger.getChild(self.connect_redis.__name__)
        for hostid in list(self.cache.keys()):
            hostid = str(hostid)
            log.info("hostid: %s redis connect" % hostid)
            self.redis.redis[hostid] = self.redis.connection(
                **self.cache_get(hostid).get('value'))

    def ping(self):
        """INFO-ping every connection; reconnect the dead ones."""
        log = self.logger.getChild(self.ping.__name__)
        for hostid in list(self.redis.redis.keys()):
            if not self.redis.get_info(self.redis.redis.get(hostid)):
                log.error("hostid: %s redis is closed reconnecting" % hostid)
                self.redis.redis[hostid] = self.redis.connection(
                    **self.cache_get(hostid).get('value'))

    def run(self):
        self.init()
        self.connect_redis()
        while True:
            self.ping()
            self.sleep(30)


class collect(base_thread):
    """Worker thread: takes numbered item batches off the items queue,
    resolves their values from cached redis INFO, and emits one INSERT
    statement per history table to the insert queue. The main program
    does not join these threads on exit."""

    count = 0  # instances created, used for thread naming

    def __init__(self, items_queue, insert_queue, redis):
        base_thread.__init__(self)
        collect.count += 1
        self.items_queue = items_queue    # batches from the delay_queue threads
        self.insert_queue = insert_queue  # SQL destined for insert_thread
        # per-history-table row buffers for the batch in progress
        self.sql = {HISTORY_STRING: [], HISTORY_NUMERIC: []}
        self.data = []  # (itemid, value, value_type, ok) tuples
        self.redis = redis  # shared redis_management instance
        self.info = {}
        self.error = None
        self.lock = RLock()
        self.setName('collect_agent_' + str(collect.count))
        self.num = 0  # number of the batch currently being processed
        self.init_log()

    def collect_info(self, items):
        """Resolve every (itemid, key_, hostid, value_type) item of the
        batch against the host's cached redis INFO."""
        log = self.logger.getChild(self.collect_info.__name__)
        if isinstance(items, (list, tuple)):
            for item in items:
                if len(item) != 4:
                    continue
                info = self.redis.info(item[2])  # item[2] is the hostid
                if not info:
                    continue
                value = util.value_get(info, *item)
                if value:
                    self.data.append(value)
        log.debug("items number: %d collect done, data: %s" % (self.num,
                                                               self.data))

    def type_process(self):
        """Bucket collected values into the numeric/string history
        buffers as (0, itemid, timestamp, value) rows."""
        log = self.logger.getChild(self.type_process.__name__)
        now_time = int(time())
        for d in self.data:
            if d[1] is None:
                log.debug("itemid: %s collect info is failured, " % str(d[0]) +
                          "items number: %d" % self.num)
                continue
            self.sql[util.history(d[2])].append((0, d[0], now_time, d[1]))
        # FIX: this summary used to be logged once per row inside the loop
        log.debug("items number: %d data split table process" % self.num +
                  " is done sql: %s " % str(self.sql))

    def put_insert_queue(self):
        """Render each non-empty buffer into one INSERT statement and
        queue it for the writer threads."""
        log = self.logger.getChild(self.put_insert_queue.__name__)
        for table in self.sql:
            rows = self.sql[table]
            if not rows:
                continue
            # BUG FIX: the old blanket .replace('L', '') (meant to strip
            # Python 2 long-literal suffixes from repr) also corrupted
            # any value string containing an 'L'; only strip an 'L'
            # that directly follows a digit and precedes ',' or ')'
            values = re.sub(r'(\d)L(?=[,)])', r'\1', str(rows)[1:-1])
            sql = 'insert into %s values ' % table + values
            self.insert_queue.put((self.num, sql))
            log.debug("items number: %d put sql to insert queue, data %s" %
                      (self.num, sql))

    def clear(self):
        """Reset the per-batch buffers."""
        log = self.logger.getChild(self.clear.__name__)
        log.debug("clean self.sql and self.data collect is reset")
        for k in self.sql:
            self.sql[k] = []
        self.data = []

    def collect_main(self):
        """Process one batch end-to-end: collect, bucket, enqueue SQL."""
        log = self.logger.getChild(self.collect_main.__name__)
        try:
            self.clear()
            self.num, items = self.items_queue.get()
            log.debug("collect start for items, itmes number: %d" % self.num)
            self.collect_info(items)
            self.type_process()
            self.put_insert_queue()
            log.debug("collect is complete for items number: %d" % self.num)
        except Collect_Error as e:
            log.error('collect exception: %s, queue number: %d ' % (str(e),
                                                                    self.num))

    def run(self):
        log = self.logger.getChild(self.run.__name__)
        self.init()
        with self.lock:
            log.debug('collect init done, collect start')
            while True:
                if self.items_queue.empty():
                    self.sleep(0.1)
                else:
                    self.collect_main()


class insert_thread(base_thread):
    """Writer thread: drains (num, sql) pairs from the insert queue
    into MySQL, buffering statements in the queue cache while the DB is
    down and dumping any leftovers to a file on shutdown."""

    count = 0  # instances created, used for thread naming

    def __init__(self, insert_queue, pool):
        base_thread.__init__(self)
        insert_thread.count += 1
        self.insert_queue = insert_queue
        self.setName('mysql_insert_' + str(insert_thread.count))
        self.db = self.get_db(pool)
        self.queue_cache_init([])  # buffers SQL during MySQL outages
        self.num = 0  # number of the batch currently being written
        self.init_log()

    def insert_sql(self, sql):
        """Execute+commit one INSERT; on failure keep the statement in
        the queue cache for a later retry and return False."""
        log = self.logger.getChild(self.insert_sql.__name__)
        try:
            self.db.execute(sql, echo=False)
            self.db.con.commit()
            log.debug("insert sql scuess, items number: %d sql data: %s" %
                      (self.num, sql))
            return True
        except Exception as e:
            log.error("insert sql exception: %s, items number: %d sql data: %s"
                      % (str(e), self.num, sql), exc_info=True)
            self.queue_cache_add(sql)
            return False

    def qcache(self, sql):
        """Insert directly when MySQL is reachable, otherwise buffer
        the statement and schedule a reconnect attempt."""
        log = self.logger.getChild(self.qcache.__name__)
        if self.db.ping():
            self.insert_sql(sql)
            return
        self.error_time_record()
        self.queue_cache_status = 1
        log.debug("sql add to cache, sql data: %s" % sql)
        self.queue_cache_add(sql)
        self.time_callback(600, self.db.connect)

    def cache_apply(self):
        """Replay buffered SQL once MySQL is reachable again."""
        log = self.logger.getChild(self.cache_apply.__name__)
        if not (self.queue_cache_get_status() and self.db.ping()):
            return
        log.debug("try insert mysql for cache, data: %s" %
                  str(self.queue_cache))
        log.info("Database Recovery update the cache to the database")
        # fixed iteration count: a statement that fails again is
        # re-appended by insert_sql and retried on a later pass
        for _ in range(len(self.queue_cache_get())):
            self.insert_sql(self.queue_cache.pop())
        if not self.queue_cache:
            # fully drained: leave buffering mode (BUG FIX: the status
            # flag was never cleared, so one outage caused a recovery
            # log line on every subsequent loop iteration forever)
            self.queue_cache_status = 0

    def stop(self):
        """Dump any still-buffered SQL to a timestamped file, then
        request the normal thread stop."""
        log = self.logger.getChild(self.stop.__name__)
        if self.queue_cache:
            ntime = strftime("%Y%m%d-%H-%M-%S")
            filename = sqlfile + "." + ntime + self.name
            log.info("insert_sql cached for sql , write file : %s" % filename)
            with open(filename, "w+") as f:
                for line in self.queue_cache:
                    f.write(line + "\n")
        super(insert_thread, self).stop()

    def run(self):
        log = self.logger.getChild(self.run.__name__)
        self.init()
        while True:
            if self.insert_queue.empty():
                self.sleep(0.1)
            else:
                self.num, sql = self.insert_queue.get()
                log.debug("get sql items number: %d for insert queue," %
                          (self.num) + "data: %s" % sql)
                self.qcache(sql)
                self.cache_apply()


class discovery(base):
    """Placeholder for a future host/item auto-discovery worker."""


class db(base):
    """Thin MySQL facade: owns (or borrows) a connection, providing a
    guarded execute() plus typed queries against the item/host tables.
    All public queries are wrapped by util.exception and therefore
    return False instead of raising."""

    def __init__(self, pool_size=5, con=None, **kwargs):
        base.__init__(self)
        self.pool_size = pool_size
        self.con = con  # borrowed pooled connection, or created below
        self.cursor = None
        self.config = self.set_config(**kwargs)
        self.pool = None
        self.name = 'db'
        self.init_log()
        if not self.con:
            # no connection handed in: build our own pool and take one
            self.pool_connect()
            self.con = self.pool.connection()

    def set_config(self, **kwargs):
        """Normalise connection kwargs, filling in MySQL defaults."""
        dbconfig = {}
        dbconfig['host'] = kwargs.pop('host', 'localhost')
        dbconfig['port'] = kwargs.pop('port', 3306)
        dbconfig['user'] = kwargs.pop('user', 'root')
        dbconfig['passwd'] = kwargs.pop('passwd', '')
        dbconfig['charset'] = kwargs.pop('charset', 'utf8')
        dbconfig['db'] = kwargs.pop('db', 'mysql')
        return dbconfig

    def pool_connect(self):
        """Create the PooledDB pool; exits the process on failure since
        nothing can run without MySQL."""
        log = self.logger.getChild(self.pool_connect.__name__)
        try:
            self.pool = PooledDB(MySQLdb, self.pool_size, **self.config)
            log.info('db.pool_connect MySQL connection pool ' +
                     'connect success')
        except Exception as e:
            log.error('db.pool_connect error: ' + str(e) + ' Script Exi' +
                      'ting...', exc_info=True)
            print("Database Error: %s" % sys.exc_info()[1])
            raise SystemExit

    @util.exception
    def connect(self):
        """(Re)open a direct MySQL connection; True on success."""
        log = self.logger.getChild(self.connect.__name__)
        try:
            self.con = MySQLdb.connect(**self.config)
            log.info('db.connect MySQL connect success')
            return True
        except Exception as e:
            log.error('db.connect error: ' + str(e), exc_info=True)
            return False

    @util.exception
    def ping(self):
        """True when the connection answers a ping (the util.exception
        wrapper turns a dead connection into False)."""
        log = self.logger.getChild(self.ping.__name__)
        self.con.ping()
        log.debug('db testing.....')
        return True

    @util.exception
    def execute(self, sql, echo=True):
        """Run *sql*; with echo=True return cursor.fetchall(), else
        None. Returns None when the connection is down or the statement
        failed."""
        log = self.logger.getChild(self.execute.__name__)
        # ping before opening a cursor (the original leaked a cursor on
        # the early return)
        if not self.ping():
            return
        cursor = self.con.cursor()
        try:
            cursor.execute(sql)
            log.debug('execute sql: %s' % sql)
        except Exception:
            # BUG FIX: the original went on to fetchall() from the
            # failed cursor; bail out instead
            log.error('db execute sql error, sql: %s ' % sql,
                      exc_info=True)
            cursor.close()
            return None
        result = None
        if echo:
            try:
                result = cursor.fetchall()
            except Exception:
                log.error("db execute sql output error", exc_info=True)
        cursor.close()
        return result

    @util.exception
    def get_item(self, itemid, status=ITEM_NORMAL):
        '''Return the (itemid, key_, hostid, value_type) row for one
        item, or [] when absent / on failure.
        table: item
        sql: select itemid, key_, hostid,value_type from item where
        itemid={itemid} and status={status}'''

        log = self.logger.getChild(self.get_item.__name__)
        sql = 'select itemid, key_, hostid,value_type from item where ' + \
            'itemid={0} and status={1}'.format(itemid, status)
        result = self.execute(sql)
        data = result[0] if result else []
        log.debug('get item for itemid: %s, data: %s ' %
                  (str(itemid), str(result)))
        return data

    @util.exception
    def get_items(self, delay=None, status=ITEM_NORMAL):
        '''Return all (itemid, key_, hostid, value_type) rows with the
        given status, optionally restricted to one delay interval.
        return: ((item1), (item2), ...) or () / None on failure'''

        log = self.logger.getChild(self.get_items.__name__)
        sql = "select itemid,key_,hostid,value_type from item where " + \
            "status={0}".format(status)
        if delay:
            sql += ' and delay={0}'.format(delay)
        log.debug('get items for status={0} and delay={1}'.format(
            status, delay))
        return self.execute(sql)

    @util.exception
    def get_delay(self, status=ITEM_NORMAL):
        '''Return the set of distinct collection intervals (seconds)
        used by items with the given status; empty set on failure.'''

        log = self.logger.getChild(self.get_delay.__name__)
        sql = 'select delay from item where status={0}'.format(status)
        r = self.execute(sql)
        log.debug('get delay for status = {0}'.format(status))
        # BUG FIX: dropped a leftover debug print and guard against a
        # failed query (r is None/False) before building the set
        return set(i[0] for i in r) if r else set()

    @util.exception
    def get_host(self, hostid, status=HOST_NORMAL):
        '''Return {hostid: {host, port, password, db}} for one host
        (empty dict when absent).
        table: host'''

        log = self.logger.getChild(self.get_host.__name__)
        data = {}
        sql = 'select hostid,ip,port,password,db from host' + \
            ' where hostid={0} and status={1}'.format(hostid, status)
        result = self.execute(sql)
        if result:
            for row in result:
                # BUG FIX: password/db were swapped relative to the
                # SELECT column order (and to get_hosts below)
                data[row[0]] = {"host": row[1], "port": row[2],
                                "password": row[3], "db": row[4]}
        log.debug("get host for hostid: %s, result: %s... " %
                  (str(hostid), str(data)[:100]))
        return data

    @util.exception
    def get_hosts(self, status=HOST_NORMAL):
        '''Return every host with the given status as
        {hostid: {host, port, password, db}}.
        table: host'''

        data = {}
        log = self.logger.getChild(self.get_hosts.__name__)
        sql = 'select hostid,ip,port,password,db from host where status' + \
            '={0}'.format(status)
        log.debug("get host for status = %d" % status)
        result = self.execute(sql)
        if result:
            for row in result:
                data[row[0]] = {'host': row[1], 'port': row[2],
                                'password': row[3], 'db': row[4]}
        log.debug("get hosts data is Null: %s " % (not data))
        return data

    def __str__(self):
        return "mysql object: mysql://%s:%d/%s" % (self.config['host'],
                                                   self.config['port'],
                                                   self.config['db'])

    __repr__ = __str__

    def close(self):
        """Close the underlying connection."""
        log = self.logger.getChild(self.close.__name__)
        self.con.close()
        log.info('close connect')


class init_main(object):
    def __init__(self):
        self.items_queue = Queue()
        self.insert_queue = Queue()
        self.delay_pool = []
        self.collect_pool = []
        self.insert_pool = []
        self.redis_pool = []
        self.collect_pool_size = 5
        self.db = db(passwd=MySQL_password, db=MySQL_db)
        self.redis = redis_management()

    def init_redis_agent(self):
        self.redis_pool.append(redis_agent(self.redis, self.db))
        map(lambda x: x.start(), self.redis_pool)
        sleep(1)

    def init_collect_pool(self):
        for n in range(self.collect_pool_size):
            self.collect_pool.append(collect(self.items_queue,
                                             self.insert_queue, self.redis))
        map(lambda x: x.start(), self.collect_pool)
        sleep(2)

    def init_delay_pool(self):
        delay = self.db.get_delay()
        print delay
        for n in delay:
            self.delay_pool.append(delay_queue(self.items_queue, n,
                                               self.db))
        map(lambda x: x.start(), self.delay_pool)
        sleep(2)

    def init_insert_thread(self):
        for i in range(2):
            self.insert_pool.append(insert_thread(self.insert_queue,
                                                  self.db))
        map(lambda x: x.start(), self.insert_pool)
        sleep(2)

    def signal(self, signal, frame):
        self.redis.lock_release()
        for t in (self.delay_pool + self.collect_pool + self.insert_pool +
                  self.redis_pool):
            t.stop()
        os.kill(os.getpid(), 9)
        print (self.delay_pool + self.collect_pool + self.insert_pool +
               self.redis_pool)
        exit(1)

    def update_host_cache(self):
        hosts = self.db.get_hosts()
        if not hosts:
            return
        for h in hosts.keys():
            self.redis.cache_db_result[util.Conversion_Type(h, str)] = hosts[h]

    def update_delay_works(self):
        pass

    def stat(self, signal, frame):
        log = logger.getChild('main.stat')
        log.info("Thread running info")
        stats = {
            'delay thread': {'t': self.delay_pool, 's': {'live': 0, 'dead': 0}
                             },
            'collect thread': {'t': self.collect_pool, 's': {'live': 0,
                                                             'dead': 0}},
            'mysql insert thread': {'t': self.insert_pool, 's': {'live': 0,
                                                                 'dead': 0}},
            'redis conn thread': {'t': self.redis_pool, 's': {'live': 0,
                                                              'dead': 0}}}
        for k in stats.keys():
            log.info(k)
            for t in stats[k]['t']:
                if t.is_alive():
                    stats[k]['s']['live'] += 1
                else:
                    stats[k]['s']['dead'] += 1
            stats[k]['s']['all'] = len(stats[k]['t'])
            log.info((" " * 4) + "live:" + str(stats[k]['s']['live']) +
                     " dead:" + str(stats[k]['s']['dead']) + ' all: ' +
                     str(stats[k]['s']['all']))

    def main(self):
        # self.init_redis_agent()
        self.update_host_cache()
        self.init_delay_pool()
        print 'delay thread pool init done.'
        self.init_collect_pool()
        print 'collect thread pool init done'
        self.init_insert_thread()
        print 'mysql insert thread pool init done'
        while True:
            self.update_host_cache()
            sleep(60)
            # print self.redis, id(self.redis)
            # r = self.redis.redis
            # for i in r.keys():
            #     print i, id(r[i])
            # print "Queue"
            # print "_" * 100
            # print "Insert:", self.insert_queue.qsize()
            # print "items:", self.items_queue.qsize()
            # print strftime("%Y%m%d %H:%S:%M"), 'thread status'
            # print "_" * 100
            # print 'delay thread list:', self.delay_pool
            # print 'insert thread list', self.insert_pool
            # print 'collect thread list:', self.collect_pool


if __name__ == '__main__':
    pid = os.fork()
    if pid != 0:
        print "The program has the background, pid: %d" % pid
        sleep(1)
        exit(0)
    stdfile = '/tmp/rstdfile'
    stdfile_open = open(stdfile, 'w')
    sys.stdout = stdfile_open
    sys.stderr = stdfile_open
    init = init_main()
    signal.signal(signal.SIGINT, init.signal)
    signal.signal(signal.SIGHUP, init.stat)
    init.main()
