# -*- coding: utf-8 -*-
import os,time
import redis  
import threading
import logging

###Login Format config
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    filename='debug.log',
                    filemode='w')

def Tail(redis,ID,path,interval=0.5):
        f = open(path, 'r')
        record_inode = os.stat(path).st_ino  #inode number
        f.seek(0,2) #跳到文件最后开始同步
        while True:
            #line = f.read(10000) #每次读取10000字节
            lines = f.readlines(100) #每次读10行
            all_str = ''
            if lines: #判断文件是否存在更新
                for line in lines:
                    all_str+=line
                redis.lpush('tq', '%s %s'%(ID,all_str)) #汇总多行一起添加队列
            else:#如果文件没有更新，作以下处理（考虑到日志会切割，滚动）
                if os.path.exists(path): ### File exists 
                    if record_inode != os.stat(path).st_ino:
                        logging.info('%s File Change'%ID)
                        record_inode = os.stat(path).st_ino  # record new inode number
                        f.close() ## close file 
                        f = open(path, 'r')
                        f.seek(0,0)
                        logging.info('Open File %s Change'%ID)
                    else:
                        pass
                else:
                    pass
            time.sleep(interval)
        f.close()

def main():
    pool=redis.ConnectionPool(host='127.0.0.1',port=6379,db=0) #Redis 队列服务器配置
    conn_redis = redis.StrictRedis(connection_pool=pool)
    filelist = [#ID 目录名称，logpath 日志路径， interval 监听间隔日志产生越快间隔应该越小
        {'ID':'debug','logpath':'debug.log','interval':5},
    ]
    ###初始化同步线程，针对每个文件启动一个线程
    threads = []
    for th in range(len(filelist)):
        th = threading.Thread(target=Tail, 
                              name=filelist[th]['ID'],
                              args=(conn_redis,filelist[th]['ID'],filelist[th]['logpath'],filelist[th]['interval'])
                              )
        threads.append(th)
    logging.info('Init threads!')
    for t in threads:
        t.start()
    logging.info('Start threads!')
    for j in threads:
        j.join()
    logging.info('Join threads!')

if __name__ == '__main__':
    main()