import jsonlines
import json
import os
from tqdm import tqdm
import numpy as np

# Length (in seconds) of the sliding window used when grouping flows by start time.
time_period = 60

# Root directory that all grouped flow files are written under.
output_dir = './dataset/datacon'

# Make sure the output root exists before anything is written.
os.makedirs(output_dir, exist_ok=True)

# Collect "black" (malicious) flows: drop flows with fewer than 10 packets,
# bucket the rest under the single label "black".
ip_logs = {}
with jsonlines.open("./dataset/datacon/black/flowmeter.log", mode='r') as flowmeter_results:
    for flowmeter_row in tqdm(flowmeter_results):
        # Flows shorter than 10 packets carry too little signal; skip them.
        # (No need to round-trip through np.array just to take len().)
        if len(flowmeter_row['packet_timestamp_vector']) < 10:
            continue
        ip_logs.setdefault("black", []).append(flowmeter_row)
# NOTE: the explicit flowmeter_results.close() was redundant — the `with`
# block already closes the reader — and has been removed, along with the
# unused src_ip/des_ip locals.

# Collect "white" (benign) flows with the same filtering as the black set.
# BUGFIX: path was "white./flowmeter.log" (stray dot) — corrected to
# "white/flowmeter.log" to mirror the black capture's layout.
with jsonlines.open("./dataset/datacon/white/flowmeter.log", mode='r') as flowmeter_results:
    for flowmeter_row in tqdm(flowmeter_results):
        # Same minimum-length filter as the black set.
        if len(flowmeter_row['packet_timestamp_vector']) < 10:
            continue
        ip_logs.setdefault("white", []).append(flowmeter_row)

def _write_group(group, output_dir, title, app_version):
    """Serialize one window of flows to <output_dir>/<title>/<app_version>/<min_ts>.pcap.json as a JSON array."""
    # The window is sorted by ts, but keep min() for safety; it names the file.
    timestamp = min(log['ts'] for log in group)
    records = []
    for flowmeter_row in group:
        packet_timestamps = np.array(flowmeter_row['packet_timestamp_vector'])
        # Map direction {0, 1} -> {-1, +1} so payload sizes are signed by direction.
        directions = 2 * np.array(flowmeter_row['packet_direction_vector']) - 1
        signed_sizes = np.array(flowmeter_row['packet_payload_size_vector']) * directions
        # Per-packet arrival offsets relative to the flow's first packet.
        arrival_deltas = packet_timestamps - packet_timestamps[0]
        records.append({
            'uid': flowmeter_row['uid'],
            'start_timestamp': flowmeter_row['ts'],
            'packet_length': signed_sizes.tolist(),
            'arrive_time_delta': arrival_deltas.tolist(),
            'id.orig_h': flowmeter_row['id.orig_h'],
            'id.resp_h': flowmeter_row['id.resp_h'],
        })
    output_file = os.path.join(output_dir, title, app_version, f'{timestamp}.pcap.json')
    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    # json.dump replaces the hand-rolled '[' / ',' / ']' writing (which also
    # carried a redundant f.close() inside the `with` block).
    with open(output_file, 'w') as f:
        json.dump(records, f)


def write_grouped_flows(ip_logs, output_dir, time_period, app_version='0'):
    """Group each label's flows into time windows and write one JSON file per window.

    A window holds flows whose ts lies within `time_period` seconds of the
    window's first flow, capped at the original size limit (len <= 1000 check,
    i.e. up to 1001 flows).

    BUGFIXES vs. the original inline loop:
    - the flow that triggered a flush was dropped; it now starts the next window;
    - the final partial window was never written; it is now flushed at the end;
    - ts is compared via float() consistently with the sort key (the original
      subtracted raw values, which crashes if ts is a string).
    """
    for title, logs in ip_logs.items():
        logs.sort(key=lambda row: float(row['ts']))
        group = []
        for flowrow in logs:
            if group and not (
                abs(float(flowrow['ts']) - float(group[0]['ts'])) < time_period
                and len(group) <= 1000
            ):
                _write_group(group, output_dir, title, app_version)
                group = []
            group.append(flowrow)
        # Flush whatever remains after the last flow.
        if group:
            _write_group(group, output_dir, title, app_version)


if __name__ == "__main__":
    write_grouped_flows(ip_logs, output_dir, time_period)