# -*- coding: utf-8 -*-
import json
import os
import re
import time
import traceback
from datetime import datetime, timedelta, timezone

from tqdm import tqdm

from darpa_src_parse.data_cadets_parser import ProcenanceInf
from darpa_src_parse.enuminfo import JSON_KEY, EVENT_EDGE_INFO

import logging

from darpa_src_parse.parse_tools import JsonReader
from darpa_src_parse.statistics import statistics
# Root logger: everything at INFO and above goes to a plain stream handler.
logger = logging.getLogger()
handler = logging.StreamHandler()
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Every intermediate and final artifact is written under this directory.
OUTPUT_DIR = r"D:\darpa_experiment\theia"


def vertex_filter():
    """Drop vertices that lack the attributes required for their type.

    Validation rules (one per vertex type):
      1. FileObject (file / dir / unix socket) must carry a ``path``.
      2. Subject (process / thread) must carry ``pid`` and ``commLine``.
      3. UnnamedPipe must carry both ``src_uuid`` and ``dst_uuid``.
      4. NetFlow must carry local/remote ip and port.
      5. Registry must carry ``key``; SrcSink vertices are always removed.

    Vertices with no ``type`` at all are removed as well.  Only vertex types
    actually observed in the dataset are handled here; the full CDM object
    families (UnnamedPipeObject, RegistryKeyObject, PacketSocketObject,
    NetFlowObject, MemoryObject, SrcSinkObject, FileObject) are wider.
    The number of removed vertices is recorded in
    ``statistics.invilid_vertex_num``.
    """
    del_vertex = []
    for key, value in ProcenanceInf.vertex.items():
        v_type = value.get('type')
        if v_type is None:
            # No type information at all -- unusable.
            del_vertex.append(key)
            continue

        # FileObject family: a path is mandatory.  (Unix sockets carry no
        # extra attributes beyond the type in this dataset.)
        if v_type in ('FILE_OBJECT_FILE', 'FILE_OBJECT_DIR',
                      'FILE_OBJECT_UNIX_SOCKET'):
            if value.get('path') is None:
                del_vertex.append(key)
            continue

        # Subject family (only SUBJECT_PROCESS observed so far): pid and
        # command line are mandatory; no executable path appears in the data.
        if v_type in ('SUBJECT_PROCESS', 'SUBJECT_THREAD'):
            if value.get('pid') is None or value.get('commLine') is None:
                del_vertex.append(key)
            continue

        # Kept as-is: no extra constraints known for this type.
        if v_type in ('SRCSINK_DATABASE',):
            continue

        # Unnamed pipe: both endpoints must be known.
        if v_type == 'unnamedpipe':
            if value.get('src_uuid') is None or value.get('dst_uuid') is None:
                del_vertex.append(key)
            continue

        # Net flow: both endpoints (ip + port) must be known.
        if v_type == 'netflow':
            if (value.get('local_ip') is None
                    or value.get('local_port') is None
                    or value.get('remote_port') is None
                    or value.get('remote_ip') is None):
                del_vertex.append(key)
            continue

        # Registry entries need their key.
        if v_type == 'registry':
            if value.get('key') is None:
                del_vertex.append(key)
            continue

        # SrcSink objects are only referenced through UnnamedPipeObject and
        # are never touched by event edges directly, so they are dropped.
        if v_type == 'SrcSink':
            del_vertex.append(key)
            continue

    statistics.invilid_vertex_num = len(del_vertex)
    for del_key in del_vertex:
        ProcenanceInf.vertex.pop(del_key)

def parse_all_info_from_file(file_name):
    '''
    Read one JSON-lines log file into the ProcenanceInf / statistics class
    attributes, and append every event record to edge_info.json.

    ProcenanceInf stores:
        vertex = {}    : keyed by uuid, holds a subset of each node's attributes
        edge = {}      : [event id, type, time, sequence, host id, subject uuid,
                          tid, exec, ppid, obj1, obj1_path, obj2, obj2_path]
        host = {}      : [host id, host name, host type, iface names[],
                          iface macs[], iface ips[]]
        principal = {} : [principal uuid, type, uid, user name, gid, host id]

    statistics stores:
        start_time : earliest timestamp seen in the logs
        end_time   : latest timestamp seen in the logs
        edge_type  : per event-type occurrence counts
        jsonline   : number of records read
    '''

    logger.info('reading file:{}'.format(file_name))

    count = 0
    line = ''  # pre-bind: the except handler below references it before first assignment
    # 'a+' because several input files append into the same edge_info.json.
    # Context managers guarantee every handle is closed (the original leaked
    # error_line_info.json's handle).
    with open(os.path.join(OUTPUT_DIR, 'edge_info.json'), 'a+') as edge_w, \
            open(os.path.join(OUTPUT_DIR, 'error_line_info.json'), 'a+') as error_line_w, \
            open(file_name, 'r', encoding='utf-8') as f, \
            tqdm(total=5000000, unit='line') as pbar:
        while 1:
            try:
                line = f.readline()
            except Exception:
                # Undecodable input: log the traceback and keep going.
                # NOTE(review): the raw bytes of the failing line are not
                # recoverable here, so the previously read line is written.
                traceback.print_exc()
                error_line_w.write(line)
                continue
            pbar.update(1)
            try:
                count += 1
                # Dispatch on a cheap substring probe instead of fully
                # parsing the JSON; each branch stores its record and moves on.
                if JSON_KEY.F_HOST in line:
                    host = JsonReader.get_host_info(line)
                    if host is None:
                        continue
                    ProcenanceInf.store_host_info(host)
                    continue

                if JSON_KEY.F_PRINCIPAL in line:
                    ProcenanceInf.store_principal_info(
                        JsonReader.get_principal_info(line))
                    continue

                if JSON_KEY.F_EVENT in line:
                    event = JsonReader.get_event_info(line)
                    ProcenanceInf.store_vertex('event', event)

                    # Persist the raw event record as a tab-separated line.
                    event = [str(item) for item in event]
                    edge_w.write('\t'.join(event) + '\n')

                    # Track the overall time span of the log.
                    ts = int(event[2])
                    if ts < statistics.log_start_time:
                        statistics.log_start_time = ts
                    if ts > statistics.log_end_time:
                        statistics.log_end_time = ts

                    # Count occurrences per event type.
                    e_type = event[1]
                    statistics.edge_type[e_type] = \
                        statistics.edge_type.get(e_type, 0) + 1
                    continue

                if JSON_KEY.F_FILE in line:
                    ProcenanceInf.store_vertex('file', JsonReader.get_file_info(line))
                    continue

                if JSON_KEY.F_SUBJECT in line:
                    ProcenanceInf.store_vertex('subject', JsonReader.get_subject_info(line))
                    continue

                if JSON_KEY.F_SRCSINK in line:
                    ProcenanceInf.store_vertex('srcsink', JsonReader.get_srcsink_info(line))
                    continue

                if JSON_KEY.F_UNNAMEDPIPE in line:
                    ProcenanceInf.store_vertex('unnamedpipe', JsonReader.get_unnamedpipe_info(line))
                    continue

                if JSON_KEY.F_NETFLOW in line:
                    ProcenanceInf.store_vertex('netflow', JsonReader.get_netflow_info(line))
                    continue

                if JSON_KEY.F_RegistryKeyObject in line:
                    ProcenanceInf.store_vertex('registry', JsonReader.get_registry_info(line))
                    continue

                # MemoryObject records (and records in a known uninteresting
                # length range) are intentionally skipped.
                if 'MemoryObject' in line or 140 <= len(line) <= 150:
                    continue

                logger.info(f'此日志未被解析{line}')
                if line != '':
                    continue
            except Exception:
                logger.error(f"此条目{line}解析失败")
                traceback.print_exc()
                # NOTE(review): aborts the whole run on the first bad record;
                # exit code 0 masks the failure -- consider raising instead.
                exit(0)

            # Only reached on EOF (empty read): record the line count.
            statistics.json_line += count
            statistics.json_line -= 1  # the final empty read is not a record

            logger.info(line)
            break


def _event_datetime(ns_timestamp):
    """Render a nanosecond epoch timestamp as a UTC-4 wall-clock string
    with millisecond precision (the dataset's local time zone)."""
    aware = datetime.utcfromtimestamp(int(ns_timestamp) / 1e9).replace(tzinfo=timezone.utc)
    local = aware.astimezone(timezone(timedelta(hours=-4)))
    return local.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]


def _write_edge(fd, src, op, dst, e_time):
    """Append one edge record to *fd* as a single JSON line."""
    record = {'src': src, 'op': op, 'dst': dst, 'time': e_time,
              'datetime': _event_datetime(e_time)}
    fd.write(json.dumps(record) + '\n')


def store_edge_info():
    '''
    Rebuild edges from edge_info.json, keeping only edges whose source and
    destination vertices both survived vertex_filter(), and write them to
    edge.json.  Edge counts are accumulated on ``statistics``.
    '''
    # First pass: count lines so tqdm can show real progress.
    with open(os.path.join(OUTPUT_DIR, 'edge_info.json'), 'r') as file:
        line_num = sum(1 for _ in file)
    statistics.edge_num = line_num

    logger.info('reading edge_info && store edge')
    with open(os.path.join(OUTPUT_DIR, 'edge.json'), 'w') as e_write_fd, \
            open(os.path.join(OUTPUT_DIR, 'edge_info.json'), 'r') as edge, \
            tqdm(total=line_num, unit='line') as pbar:
        for line in edge:
            pbar.update(1)
            # Record layout (tab separated):
            # [0 event id, 1 type, 2 time, 3 sequence, 4 host id,
            #  5 subject uuid, 6 tid, 7 exec, 8 ppid,
            #  9 obj1, 10 obj1_path, 11 obj2, 12 obj2_path]
            e_info = line.split('\t')
            e_type = e_info[1]   # renamed: 'type' shadowed the builtin
            e_time = e_info[2]   # renamed: 'time' shadowed the imported module
            subject = e_info[5]
            obj1 = e_info[9]
            obj2 = e_info[11]

            # Missing fields were serialized via str(None), so the sentinel
            # is the string "None".  (The original compared against None
            # itself, which can never match a str, so subject-less records
            # slipped through.)
            if subject == "None":
                statistics.loss_edge_num += 1
                continue

            op = EVENT_EDGE_INFO.MAP_TO_OP.get(e_type, e_type)

            if e_type in EVENT_EDGE_INFO.SUB_TO_OBJ:
                # subject --op--> obj1; both vertices must exist.
                if (obj1 == "None"
                        or ProcenanceInf.vertex.get(subject) is None
                        or ProcenanceInf.vertex.get(obj1) is None):
                    statistics.loss_edge_num += 1
                else:
                    _write_edge(e_write_fd, subject, op, obj1, e_time)
                    statistics.generated_edge_num += 1
                continue

            if e_type in EVENT_EDGE_INFO.MultiOBJ_TO_SUB:
                # obj1 --op--> subject and/or obj2 --op--> subject.
                if obj1 == "None" and obj2 == "None":
                    statistics.loss_edge_num += 1
                    continue
                # NOTE(review): the original tested the truthiness of
                # obj1/obj2 (always true for a non-empty string) and bailed
                # out of the whole record when obj1's vertex was missing,
                # dropping obj2's edge too; each object is now independent.
                for obj in (obj1, obj2):
                    if obj == "None":
                        continue
                    if (ProcenanceInf.vertex.get(subject) is None
                            or ProcenanceInf.vertex.get(obj) is None):
                        statistics.loss_edge_num += 1
                        continue
                    _write_edge(e_write_fd, obj, op, subject, e_time)
                    statistics.generated_edge_num += 1
                continue

            if e_type in EVENT_EDGE_INFO.ALL_TO_OBJ2:
                # obj1 --op--> obj2 and subject --op--> obj2; both objects
                # are required.
                if obj1 == "None" or obj2 == "None":
                    statistics.loss_edge_num += 1
                    continue
                # Count each edge as it is actually written (the original
                # could write the first edge and then fail the subject check,
                # leaving generated_edge_num understated).
                for src in (obj1, subject):
                    if (ProcenanceInf.vertex.get(src) is None
                            or ProcenanceInf.vertex.get(obj2) is None):
                        statistics.loss_edge_num += 1
                        continue
                    _write_edge(e_write_fd, src, op, obj2, e_time)
                    statistics.generated_edge_num += 1
                continue

            if e_type in EVENT_EDGE_INFO.OBJ_TO_SUB:
                # obj1 --op--> subject; both vertices must exist.
                if (obj1 == "None"
                        or ProcenanceInf.vertex.get(subject) is None
                        or ProcenanceInf.vertex.get(obj1) is None):
                    statistics.loss_edge_num += 1
                else:
                    _write_edge(e_write_fd, obj1, op, subject, e_time)
                    statistics.generated_edge_num += 1
                continue

            # Event types not covered above are only tallied.
            if e_type not in statistics.unhandle_egde_type:
                statistics.unhandle_egde_type.append(e_type)
            statistics.unhandle_egde_num += 1


def theia_data_parse():
    """Top-level driver for the THEIA e3 dataset.

    Stage 1: parse every JSON-lines file into the in-memory provenance
    structures.  Then filter malformed vertices, materialize edges
    (stage 2), and finally dump vertices / hosts / principals / per-type
    edge counts and the run statistics to OUTPUT_DIR.
    """
    ProcenanceInf.clear_data()
    e3path_dir = r'F:\largedata\dataset\darpa-e3-theia'

    entries = os.listdir(e3path_dir)
    paths = [os.path.join(e3path_dir, p) for p in entries
             if os.path.isdir(os.path.join(e3path_dir, p))]

    def extract_number(filename):
        """Sort key: numeric suffix of 'xxx.json.N' ('xxx.json' counts as 0)."""
        match = re.search(r'json(?:\.(\d+))?', filename)
        if match:
            return int(match.group(1)) if match.group(1) else 0
        # Names without 'json' sort first.  (Returning None here, as the
        # original did, makes sorted() raise TypeError on Python 3.)
        return -1

    # Collect every part file, per directory, in numeric order
    # (file.json, file.json.1, file.json.2, ...).
    file_list = []
    for px in paths:
        sorted_files = sorted(os.listdir(px), key=extract_number)
        file_list.extend(os.path.join(px, entry) for entry in sorted_files)

    statistics.handle_files = file_list

    #
    # Stage 1 -- read every file's records into the provenance structures.
    #
    t_parse_info_json_start = time.time()
    # Truncate edge_info.json first: parse_all_info_from_file() appends to
    # it.  Close the handle immediately (the original leaked it).
    open(os.path.join(OUTPUT_DIR, 'edge_info.json'), 'w').close()
    for file_path in file_list:
        logger.info(f"正在解析所有节点信息,{file_path}")
        parse_all_info_from_file(file_path)
    t_parse_info_json_down = time.time()

    statistics.read_node_spend = t_parse_info_json_down - t_parse_info_json_start
    statistics.node_num = len(ProcenanceInf.vertex)

    # Normalize the per-type vertex schemas; purge malformed vertices
    # (stored in ProcenanceInf.vertex).
    logger.info(f"正在过滤不符合规范的节点")
    vertex_filter()

    #
    # Stage 2 -- keep only edges whose endpoints both exist.
    #
    logger.info(f"正在提取所有边信息，写入edge.json")
    store_edge_info()

    t_write_edge_down = time.time()
    statistics.store_edge_spend = t_write_edge_down - t_parse_info_json_down

    WRITE_FILE = True
    if WRITE_FILE:
        logger.info(f"正在将所有统计信息写入文件")

        def _dump_items(filename, mapping):
            # One {key: value} JSON object per line.
            with open(os.path.join(OUTPUT_DIR, filename), 'w') as fd:
                for key, value in mapping.items():
                    fd.write(json.dumps({key: value}) + '\n')

        _dump_items('vertex.json', ProcenanceInf.vertex)          # merged vertices
        _dump_items('host.json', ProcenanceInf.host)              # host info
        _dump_items('principal.json', ProcenanceInf.principal)    # principal info
        _dump_items('edge_type_info.json', statistics.edge_type)  # per-type counts
    statistics.write_kinds_of_files_spend = time.time() - t_write_edge_down

    with open(os.path.join(OUTPUT_DIR, 'execute_program_info.json'), 'w') as f:
        f.write(statistics.show())

# Guarded entry point: importing this module no longer triggers a full parse.
if __name__ == "__main__":
    theia_data_parse()