'''
Created on Feb 24, 2016

@author: JimmyMo
'''
from __future__ import print_function
import re
import time
import processor
import redis
import json
import patterns_gen
import sys
import traceback
    
def format_log(log_patterns):
    """Normalize raw pattern lines and run each through the log processor.

    Each element is stripped of its trailing newline and suffixed with the
    two characters backslash + 'n' (a literal "\\n", not a real newline),
    then handed to processor.analyse_log.

    :param log_patterns: iterable of raw pattern strings
    :return: list of analysed results, one per input line
    """
    # NOTE: the previous version carried a commented-out try/except that left
    # the live statements over-indented; the logic itself is unchanged.
    formatted = [elem.strip('\n') + '\\n' for elem in log_patterns]
    return [processor.analyse_log(elem) for elem in formatted]
def pre_process_log(log_patterns):
    """Backslash-escape every single quote in each pattern line.

    :param log_patterns: iterable of pattern strings
    :return: new list with each ' replaced by \\'
    """
    escaped = []
    for pattern in log_patterns:
        escaped.append(pattern.replace("'", "\\'"))
    return escaped

def read_log_pattern_from_mem(pattern_path):
    """Generate log patterns in memory via patterns_gen (no file read).

    :param pattern_path: path handed through to patterns_gen.generate_patterns
    :return: whatever patterns_gen.generate_patterns produces
    """
    # Removed an unreachable `pass` that followed the return statement.
    return patterns_gen.generate_patterns(pattern_path)
def read_log_pattern_from_file(path):
    """Read all raw pattern lines (newlines preserved) from *path*.

    :param path: filesystem path of the pattern file
    :return: list of lines as returned by readlines()
    """
    with open(path, "r") as pattern_file:
        return pattern_file.readlines()
def get_group(devicename, portname):
    """Map a device-name prefix to its group label.

    :param devicename: device name whose prefix selects the group
    :param portname: accepted for interface compatibility; not consulted
    :return: "AAA".."DDD" for known prefixes, "EEE" otherwise
    """
    prefix_to_group = (
        ("devicename0", "AAA"),
        ("devicename1", "BBB"),
        ("devicename2", "CCC"),
        ("devicename3", "DDD"),
    )
    for prefix, label in prefix_to_group:
        if devicename.startswith(prefix):
            return label
    return "EEE"
def get_map_key(group, devicename, ip, portname):
    """Build the Redis key "d:<devicename>:ip:<ip>:o:<portname>".

    :param group: kept for backward compatibility with callers; currently
        not part of the key (the older GRP-based format is retired)
    :param devicename: device name component
    :param ip: IP address component
    :param portname: object/port component
    :return: formatted key string
    """
    # Removed an unreachable `pass` that followed the return statement.
    return "d:%s:ip:%s:o:%s" % (devicename, ip, portname)
def emit_log_to_redis(url, li):
    """Parse each quasi-JSON line in *li*, key it by record type, and bulk-set
    all of them into Redis through a single MULTI pipeline.

    :param url: accepted for interface compatibility; the connection is
        hard-coded to localhost:6379 db 0
    :param li: iterable of strings, each a JSON object that uses single
        quotes in place of double quotes
    """
    r = redis.Redis(host='localhost', port=6379, db=0)
    pipe = r.pipeline()
    pipe.multi()
    counter = 0
    counter_endpoint = 0
    counter_history = 0
    counter_mapping = 0
    for line in li:
        counter += 1
        # Input uses single quotes; swap to double quotes so json can parse it.
        d = json.loads(line.replace("'", '"'))

        if 'endpoint' not in d:
            # Mapping record: device fields live at the top level.
            counter_endpoint += 1
            d['seq'] = counter_endpoint
            devicename = d['devicename']
            ip = d['ip']
            portname = d['object']
            prefix = "mapping:"
        elif 'history' in d:
            # Historical message: device fields nested under 'endpoint'.
            counter_history += 1
            d['seq'] = counter_history
            devicename = d['endpoint']['devicename']
            ip = d['endpoint']['ip']
            portname = d['object']
            prefix = "msg:p:"
        else:
            # Current message: same nesting as history, different key prefix.
            counter_mapping += 1
            d['seq'] = counter_mapping
            devicename = d['endpoint']['devicename']
            ip = d['endpoint']['ip']
            portname = d['object']
            prefix = "msg:c:"

        groupname = get_group(devicename, portname)
        key = prefix + get_map_key(groupname, devicename, ip, portname)
        pipe.set(key, json.dumps(d))
        if counter % 10000 == 0:
            # BUG FIX: the original passed `counter` as a second print argument,
            # printing a tuple-like pair instead of substituting into the format.
            print("emit %s records to redis..." % counter)
    # time.clock() was removed in Python 3.8; wall-clock time suits I/O timing.
    now = time.time()
    print("inserting to Redis...")
    pipe.execute()
    print("done inserting")
    print("total elapsed %f..." % (time.time() - now))
def emit_log_to_file(path, li):
    """Append every line in *li* to the file at *path* and report elapsed time.

    :param path: target file path (opened in append mode, created if missing)
    :param li: iterable of strings written verbatim via writelines
    """
    # time.clock() was removed in Python 3.8; wall-clock time suits I/O timing.
    start = time.time()
    print("writing to file...")
    with open(path, "a+") as log_file:
        log_file.writelines(li)
    print("done writing...")
    print("total elapsed time %f..." % (time.time() - start))
        

def job(log_path, pattern_path):
    """Run one pipeline pass: read patterns, pre-process, analyse, emit to Redis.

    :param log_path: accepted for interface compatibility; the file-emission
        step that used it is currently disabled
    :param pattern_path: path handed to the in-memory pattern generator
    """
    def format_exception(e):
        """Format the active exception plus the surrounding call stack as one
        traceback-style string (trailing newline stripped)."""
        exception_list = traceback.format_stack()
        # Drop the frames belonging to this helper and its caller.
        exception_list = exception_list[:-2]
        exception_list.extend(traceback.format_tb(sys.exc_info()[2]))
        exception_list.extend(
            traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]))
        exception_str = "Traceback (most recent call last):\n" + "".join(exception_list)
        return exception_str[:-1]

    try:
        print("reading log pattern...")
        log_patterns = read_log_pattern_from_mem(pattern_path)
        print("done reading...")
        print("processing log...")
        li = pre_process_log(log_patterns)
        li = format_log(li)
        print("done processing...")
        emit_log_to_redis("url", li)
        print('.', end="")
    except Exception as e:
        # BUG FIX: format_exception was previously defined *after* this
        # try/except, so the handler raised NameError whenever it ran; the
        # helper is now defined before the try block.
        print("Printing only the traceback above the current stack frame")
        print("".join(traceback.format_exception(
            sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print("Printing the full traceback as if we had not caught it here...")
        print(format_exception(e))
#     print('.', end="",flush=True)

# def start(interval, log_path, pattern_path):
#     schedule.every(interval).seconds.do(job, log_path, pattern_path)
#     while 1:
#         schedule.run_pending()
#         time.sleep(1)
      
if __name__ == "__main__":
    # BUG FIX: read_log_pattern_from_file requires a path argument; the
    # original no-argument call raised TypeError immediately. The default
    # mirrors the path documented in the function's own commented-out code.
    read_log_pattern_from_file("log_patterns.conf")