# uncompyle6 version 3.9.0
# Python bytecode version base 2.7 (62211)
# Decompiled from: Python 3.8.3 (tags/v3.8.3:6f8c832, May 13 2020, 22:37:02) [MSC v.1924 64 bit (AMD64)]
# Embedded file name: /home/liup/work/code/work/spider/log.py
# Compiled at: 2018-07-11 01:35:44
"""
文件名：log.py（原注释误写为 setting.py）
功能：爬虫日志处理模块；提供多进程安全的文件回滚日志 handler 与 JSON/kafka 日志工具

代码历史：
2014-06-05：庞 威，创建代码
2017-01-05: 段毅飞 增加 kafka 日志处理
"""
import os, sys
try:
    import ujson as json
except ImportError:
    import json

import time, logging, traceback, logging.handlers
from multiprocessing import Lock
try:
    import setting
    spider_id = setting.SPIDER_ID
    spider_ip = setting.SPIDER_IP
except ImportError:
    spider_id = ''
    spider_ip = ''

class SpiderRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """Rotating file handler used by the spider.

    Features:
        1. Uses a lock file's modification time to decide whether this
           process should actually perform the rollover, which avoids the
           bug of several processes rotating (and writing) the same backup
           files at the same time.
        2. Optionally emits each record as a JSON object (``is_json=True``).
    """

    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=0, is_json=False):
        """
        @filename/mode/maxBytes/backupCount/encoding/delay: passed straight
            through to logging.handlers.RotatingFileHandler.
        @is_json: when True, records are rendered by json_format() instead
            of the handler's normal formatter.
        """
        logging.handlers.RotatingFileHandler.__init__(self, filename, mode, maxBytes, backupCount, encoding, delay)
        self.Formatter = logging.Formatter()  # only used for formatTime() in json_format
        self.my_lock = Lock()  # serializes doRollover within this process
        self.is_json = is_json
        if self.is_json:
            # Monkey-patch format() so emit() produces one JSON line per record.
            self.format = self.json_format

    def json_format(self, record):
        """Format *record* as a JSON string.

        If the record's message itself parses as a JSON object its keys are
        kept; otherwise the raw message is stored under '_message'.  The
        spider id/ip and a fixed set of LogRecord attributes (prefixed with
        '_') are merged in.

        @record: logging.LogRecord
        @return: JSON-encoded str
        """
        record.asctime = self.Formatter.formatTime(record)
        message = record.getMessage()
        log_data = {}
        try:
            log_data = json.loads(message)
            if not isinstance(log_data, dict):
                log_data = {}
        except Exception:
            # Message is not JSON; fall through and wrap it below.
            # (Original captured traceback.format_exc() into an unused local.)
            pass

        if not log_data:
            log_data.update({'_message': message})
        log_data.update({'spider_id': spider_id, 
           'spider_ip': spider_ip})
        log_record_basic_fields = [
         'levelname', 'filename', 'lineno', 
         'name', 
         'created', 'asctime', 'process']
        for attr in log_record_basic_fields:
            value = getattr(record, attr, '')
            log_data.update({('_{}').format(attr): value})

        try:
            result = json.dumps(log_data, ensure_ascii=False)
        # json.dumps with ensure_ascii=False can raise (e.g. mixed
        # byte/unicode payloads on Python 2); retry with the ASCII-safe form.
        except Exception:
            result = json.dumps(log_data)

        return result

    def doRollover(self):
        """
        Do a rollover, as described in __init__().

        Multi-process guard: a companion '<logfile>.lock' file is (re)created
        whenever a rollover is performed.  A process only rotates if the lock
        file is missing, or if it was last touched more than
        max_modify_interval seconds ago — otherwise another process has just
        rotated and this one merely reopens the (new) base file.
        """
        with self.my_lock:
            if self.stream:
                self.stream.close()
                self.stream = None
            lock_file = '%s.lock' % self.baseFilename
            max_modify_interval = 3  # seconds within which a rollover counts as "just done"
            do_flag = 0
            if not os.path.exists(lock_file):
                with open(lock_file, 'w'):
                    pass
                do_flag = 1
            elif time.time() - os.stat(lock_file).st_mtime > max_modify_interval:
                do_flag = 1
            if do_flag:
                # Shift debug.log.1 -> debug.log.2 ... up to backupCount.
                for i in range(self.backupCount - 1, 0, -1):
                    sfn = '%s.%d' % (self.baseFilename, i)
                    dfn = '%s.%d' % (self.baseFilename, i + 1)
                    if os.path.exists(sfn):
                        if os.path.exists(dfn):
                            os.remove(dfn)
                        os.rename(sfn, dfn)

                dfn = self.baseFilename + '.1'
                if os.path.exists(dfn):
                    os.remove(dfn)
                if os.path.exists(self.baseFilename):
                    os.rename(self.baseFilename, dfn)
                # Touch the lock file so sibling processes skip this rollover.
                with open(lock_file, 'w'):
                    pass
        if not self.delay:
            self.stream = self._open()
        return


def make_dispatch_log(exc_text='', code=0, extra=None):
    """Build a dispatch-log JSON line for the 'spider_dispatch' kafka topic.

    @exc_text: failure reason ('' on success)
    @code: 0 (success) or a non-zero error code
    @extra: optional dict of extra fields merged into the record
    @return: JSON-encoded str
    """
    # `extra=None` replaces the original shared-mutable `extra={}` default
    # (classic Python default-argument pitfall); falsiness check is unchanged.
    _log = {'exc_text': exc_text,
            'code': code,
            'kafka_topic': 'spider_dispatch'}
    if extra:
        _log.update(extra)
        if code == 0:
            # On success the (potentially large) config content is dropped.
            _log.pop('config_content', '')
    return json.dumps(_log)


def make_config_log(exc_text='', config_id=-1, extra=None):
    """Build a config-log JSON line for the 'spider_config' kafka topic.

    @exc_text: failure reason ('' on success)
    @config_id: id of the config this record concerns; falsy values
        (None, 0, '') are normalized to -1
    @extra: optional dict of extra fields merged into the record
    @return: JSON-encoded str
    """
    # `extra=None` replaces the original shared-mutable `extra={}` default.
    if not config_id:
        config_id = -1
    _log = {'kafka_topic': 'spider_config', 'config_id': config_id,
            'exc_text': exc_text}
    if extra:
        _log.update(extra)
    return json.dumps(_log)


def make_spidercode_log(exc_text='', config_id=-1, extra=None):
    """Build a spider-code log JSON line for the 'spider_code' kafka topic.

    @exc_text: failure reason ('' on success)
    @config_id: id of the config this record concerns; falsy values
        (None, 0, '') are normalized to -1
    @extra: optional dict of extra fields merged into the record
    @return: JSON-encoded str
    """
    # `extra=None` replaces the original shared-mutable `extra={}` default.
    if not config_id:
        config_id = -1
    _log = {'kafka_topic': 'spider_code', 'config_id': config_id,
            'exc_text': exc_text}
    if extra:
        _log.update(extra)
    return json.dumps(_log)


# --- module-level logger setup (runs on import) ---
logger = logging.getLogger()
# Log directory: <parent of the script's directory>/log
current_file_path = os.path.dirname(os.path.abspath(sys.path[0]))
log_path = os.path.join(current_file_path, 'log')
if not os.path.isdir(log_path):
    os.makedirs(log_path)
# Plain-text rotating debug log: 10 MiB per file, 100 backups.
fp = SpiderRotatingFileHandler(os.path.join(log_path, 'debug.log'), maxBytes=10485760, mode='a', backupCount=100)
logger.addHandler(fp)
std = logging.StreamHandler(sys.stdout)
logger.addHandler(std)
# JSON-formatted rotating log: 2 MiB per file, 2 backups.
# NOTE(review): sfp is created but never added to the logger — confirm
# whether it is attached elsewhere or only kept for daemon_files_preserve.
sfp = SpiderRotatingFileHandler(os.path.join(log_path, 'bak.log'), maxBytes=2097152, backupCount=2, is_json=True)
formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] [%(filename)s] [%(lineno)d] - %(message)s')
fp.setFormatter(formatter)
std.setFormatter(formatter)
# Streams that must stay open if the process daemonizes.
# Fixed: the decompiled source contained a stray token ("[a") that made
# this list literal a syntax error.
daemon_files_preserve = [fp.stream, sfp.stream, std.stream]
logger.setLevel(logging.NOTSET)
if __name__ == '__main__':
    logger.debug('hello')