import csv
import os
import re
from collections import defaultdict
from urllib.parse import urlparse

from tqdm import tqdm

def extract_process_info(log_file):
    """Parse one log file and collect process/domain occurrence statistics.

    Log lines contain ``K<=>value`` fields where ``K`` is a single uppercase
    letter: ``U`` is a URL, ``P`` a process name, ``N`` a human-readable
    program name. Lines starting with ``Last<=`` / ``L_Start<=`` are
    bookkeeping records and are skipped.

    Args:
        log_file: Path to the log file to parse.

    Returns:
        Tuple ``(process_info, process_counter)``:
        ``process_info`` maps a process name (or URL domain) to its program
        name; ``process_counter`` maps the *same* keys to occurrence counts.
    """
    process_counter = defaultdict(int)
    process_info = {}
    # Compile once; the pattern is applied to every line of the file.
    field_re = re.compile(r'([A-Z])<=>([^\[\]=]+)')

    with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
        for line in f:
            line = line.strip()
            # Skip blanks and timing/bookkeeping records.
            if not line or line.startswith('Last<=') or line.startswith('L_Start<='):
                continue

            fields = dict(field_re.findall(line))

            if 'U' in fields:
                # URL record: the domain serves as both process and program name.
                try:
                    domain = urlparse(fields['U']).netloc
                except ValueError:
                    # Malformed URL (e.g. invalid IPv6 literal) -- skip line.
                    continue
                if domain:
                    process_counter[domain] += 1
                    process_info.setdefault(domain, domain)
            elif 'P' in fields:
                process_name = fields['P'].strip()
                if process_name:
                    process_counter[process_name] += 1
                    if process_name not in process_info:
                        # BUG FIX: key the mapping by the process name, not the
                        # program name, so it matches the counter's keys.
                        program_name = fields.get('N', process_name).strip()
                        process_info[process_name] = program_name
    return process_info, process_counter

def process_all_logs(input_root, output_file):
    """Aggregate process information from every ``.txt`` log under a tree.

    Walks ``input_root`` recursively, parses each ``.txt`` file with
    ``extract_process_info``, merges the per-file counters, and writes a
    ``ProcessName,ProgramName,Count`` CSV (sorted by count, descending)
    to ``output_file``.

    Args:
        input_root: Root directory containing the processed log files.
        output_file: Destination path for the CSV summary.
    """
    total_process_counter = defaultdict(int)
    process_name_mapping = {}
    total_files = 0

    for root, _, files in os.walk(input_root):
        # Filter to log files before tqdm so the bar reflects real work.
        log_files = [f for f in files if f.endswith('.txt')]
        for file in tqdm(log_files, desc=f"Processing {os.path.basename(root)}"):
            log_path = os.path.join(root, file)
            try:
                process_info, process_counter = extract_process_info(log_path)
            except Exception as e:
                # Best effort: report the failure and continue with the rest.
                print(f"\nError processing {log_path}: {str(e)}")
                continue
            for p, counter in process_counter.items():
                total_process_counter[p] += counter
                # First occurrence wins for the name mapping.
                if p not in process_name_mapping and p in process_info:
                    process_name_mapping[p] = process_info[p]
            total_files += 1

    rows = sorted(
        ((p, n, total_process_counter[p]) for p, n in process_name_mapping.items()),
        key=lambda row: row[2],
        reverse=True,
    )

    with open(output_file, 'w', encoding='utf-8', newline='') as f:
        f.write("ProcessName,ProgramName,Count\n")
        # csv.writer escapes embedded quotes/commas correctly; the original
        # hand-rolled f-string quoting produced invalid CSV for names
        # containing a double quote.
        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerows(rows)

    print(f"\nProcessed {total_files} log files.")
    print(f"Found {len(process_name_mapping)} unique process names.")
    print(f"Results saved to {output_file}")

def generate_process_name_mapping(input_root="data/processed",
                                  output_file="data/process_names_mapping.csv"):
    """Build the process-name mapping CSV from processed log files.

    Args:
        input_root: Directory tree containing the ``.txt`` log files.
            Defaults to the project's standard processed-data location.
        output_file: Path of the CSV to create; parent directories are
            created as needed. Defaults to the standard mapping location.
    """
    out_dir = os.path.dirname(output_file)
    # os.makedirs('') raises FileNotFoundError -- skip when the output
    # goes to the current working directory.
    if out_dir:
        os.makedirs(out_dir, exist_ok=True)
    process_all_logs(input_root, output_file)

# Run as a script: build data/process_names_mapping.csv from data/processed.
if __name__ == "__main__":
    generate_process_name_mapping()