#coding=utf-8
'''
Tornado application bootstrap for the UFO server.

Created on 2016-03-04.
'''
import logging
import os
import configparser
from setting_base.setting import C
import tornado
import time
import signal
import importlib
from tornado.options import options
from util_base import ufo_utils
from util_base.server_context import ServerContext
from tornado.web import Application
from util_base.db_utils import MysqlConnPool
from common_base.cmd import CMD


from logging import config as logging_config

# Module-level reference to the running UFOApplication instance;
# populated by set_app() and read by close_httpserver() at shutdown.
app = None


class UFOApplication(Application):
    """Tornado ``Application`` subclass wired into the UFO project.

    On construction it registers itself in :class:`ServerContext`, opens the
    configured database connections, loads all processor modules (which
    register cmd handlers as an import side effect) and collects the set of
    command ids declared on :class:`CMD`.
    """

    def __init__(self, handlers, template_path='', static_path=''):
        """Build the application.

        :param handlers: tornado URL spec list.
        :param template_path: optional override for the template directory.
        :param static_path: optional override for the static-file directory.
        """
        # Requests longer than this are considered timed out by the client
        # side, so logged request times are clamped to it (10 seconds).
        self.REQUEST_TIMEOUT_MILLS = 10000
        self.app_name = ''

        # Expose this application instance through the shared ServerContext.
        ServerContext.application = self

        # tornado application settings
        tornado_settings = dict(
            gzip=True,
            debug=options.debug,
            # Static-resource defaults; may be overridden below.
            static_url_prefix='/static/',
            static_path=os.path.join(os.path.dirname(__file__), "static"),
        )
        if template_path:
            tornado_settings['template_path'] = template_path
        if static_path:
            tornado_settings['static_path'] = static_path

        super().__init__(handlers=handlers, **tornado_settings)

        # log tracker
#         self.tracker = Tracker('track')
#         self.sys_logger = Tracker('root')
#         self.internal_tracker = Tracker('internalTrack')

        # Database connections, built from the project settings.
        self.db_conns = ufo_utils.get_db_conns(C, use_pool=True)  # @UndefinedVariable

        self.alive_base_info = C('ALIVE_BASE')

        # Subclasses may set `use_mysql_pool` before this __init__ runs;
        # getattr with a default replaces the hasattr+getattr combination.
        if getattr(self, 'use_mysql_pool', False):
            ufo_utils.set_mysql_pool(C)

        # Import processor modules (registers cmd -> processor mappings).
        load_processor()

        # Collect all command ids declared on CMD (attributes named CMD_*).
        self.cmd_auth_status = [v for k, v in CMD.__dict__.items()
                                if k.startswith('CMD_')]
        # cache related

        # initialize system envrionment

        # scheduler if need

    def set_rpc_client(self, client):
        """Attach the RPC client used by handlers."""
        self.rpc_client = client

    def log_request(self, handler):
        """Writes a completed HTTP request to the logs.

        By default writes to the python root logger.  To change
        this behavior either subclass Application and override this method,
        or pass a function in the application settings dictionary as
        ``log_function``.
        """
        access_log = handler.tracker

        if "log_function" in self.settings:
            self.settings["log_function"](handler)
            return
        if handler.get_status() < 400:
            log_method = access_log.info
        elif handler.get_status() < 500:
            # `warning` replaces the deprecated `warn` alias.
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * handler.request.request_time()
        # Clamp: past REQUEST_TIMEOUT_MILLS the client has already timed out.
        if request_time > self.REQUEST_TIMEOUT_MILLS:
            request_time = self.REQUEST_TIMEOUT_MILLS
        log_method("HTTPResult:%d %s %.2fms" % (handler.get_status(),
                   handler._request_summary(), request_time))
        

def initlog(config_file, port):
    """Initialise logging from a ConfigParser-format file.

    Rewrites the log-file paths of the sys/track/internalTrack handlers so
    that each server process (identified by *port*) writes its own file
    under the configured LOG_DIR, creates the log directories, then installs
    the configuration.

    :param config_file: path to the logging config, or a file-like object.
    :param port: server port, appended to each handler's log file name.
    """

    def __fileConfig(config, disable_existing_loggers=True):
        """
        Read the logging configuration from a ConfigParser-format file.

        This can be called several times from an application, allowing an end user
        the ability to select from various pre-canned configurations (if the
        developer provides a mechanism to present the choices and load the chosen
        configuration).
        """
        # NOTE(review): relies on private logging/logging.config internals
        # (_create_formatters, _acquireLock, ...); these may change between
        # Python versions — confirm against the running interpreter.
        formatters = logging_config._create_formatters(config)

        # critical section
        logging._acquireLock()
        try:
            logging._handlers.clear()
            del logging._handlerList[:]
            # Handlers add themselves to logging._handlers
            handlers = logging_config._install_handlers(config, formatters)
            logging_config._install_loggers(config, handlers, disable_existing_loggers)
        finally:
            logging._releaseLock()

    # Load the configuration file.
    cp = configparser.ConfigParser(None)
    if hasattr(config_file, 'readline'):
        # read_file() replaces readfp(), which was deprecated in 3.2 and
        # removed in Python 3.12.
        cp.read_file(config_file)
    else:
        cp.read(config_file, encoding='utf-8')

    # Rewrite the file name in each handler's `args` so the port is appended,
    # giving every server process its own log file.
    for item in ['handler_sys', 'handler_track', 'handler_internalTrack']:
        # SECURITY NOTE: eval() of the `args` value mirrors what
        # logging.config itself does; the config file must be trusted.
        args = list(eval(cp.get(item, 'args').strip()))
        log_dir = os.path.join(C("BASIC")["LOG_DIR"], os.path.dirname(args[0]))
        args[0] = os.path.join(C("BASIC")["LOG_DIR"], '%s_%s' % (args[0], port))
        cp.set(item, 'args', str(tuple(args)))
        # Create the log directory; exist_ok avoids the check-then-create
        # race when several workers start simultaneously.
        os.makedirs(log_dir, exist_ok=True)

    __fileConfig(cp)
    
    
# added a handler for kill or other interrupt
def tata_shutdown_handler(sig, frame):
    """Signal handler: log the received signal and schedule a graceful
    shutdown of the HTTP server on the IOLoop."""
    logging.warning('Caught signal: %s', sig)
    tornado.ioloop.IOLoop.instance().add_callback(close_httpserver)

def close_httpserver():
    """Gracefully stop the IOLoop within a 5-second deadline, then release
    database resources.

    Polls once per second until the IOLoop has no pending callbacks or
    timeouts (or the deadline passes), stops it, closes every database
    connection held by the global ``app`` and clears the MySQL pool.
    """
    deadline = time.time() + 5

    def stop_loop():
        now = time.time()
        io_loop = tornado.ioloop.IOLoop.instance()
        # NOTE(review): _callbacks/_timeouts are private tornado attributes;
        # verify they exist on the deployed tornado version.
        if now < deadline and (io_loop._callbacks or io_loop._timeouts):
            io_loop.add_timeout(now + 1, stop_loop)
        else:
            io_loop.stop()

            # Close database connections.
            # BUG FIX: dict.iteritems() is Python 2 only and raises
            # AttributeError on Python 3; iterate .values() (keys unused).
            for conn in app.db_conns.values():
                if hasattr(conn, 'close'):
                    conn.close()
            # Release the connection pool.
            MysqlConnPool.lock(clear=True)
            logging.info('Shutdown')

    stop_loop()
    logging.info(".....close_httpserver():ready")

def set_signals():
    '''
    Install the graceful-shutdown handler for the interrupt and
    termination signals.
    '''
    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, tata_shutdown_handler)


def set_app(http_server_app):
    """Register *http_server_app* as the module-level ``app`` singleton
    used by close_httpserver() during shutdown."""
    globals()['app'] = http_server_app
    
def load_processor():
    '''
    Import every .py file under <cwd>/processor as a module so that the
    cmd_id -> processor mapping (urls.processor_mapping) is populated as
    an import side effect.
    '''
    root = os.getcwd()
    prefix_len = len(root) + 1  # strip "<root>/" from each file path
    module_names = set()
    for folder, _subdirs, files in os.walk(root + '/processor'):
        for name in files:
            full_path = os.path.join(folder, name)
            if not full_path.endswith('.py'):
                continue
            # "<root>/processor/a/b.py" -> "processor.a.b"
            dotted = (full_path[prefix_len:-len('.py')]
                      .replace('/', '.')
                      .replace('\\', '.'))
            module_names.add(dotted)

    for module_name in module_names:
        importlib.import_module(module_name)

