#!/usr/bin/env python3
"""
feature_builder.py
Takes sessions (from dataset/raw_sessions.pkl) and extracts tabular features and graph-based features.
Outputs a parquet file of features (one row per session) and a serialized graph objects file.
"""
import argparse
import pickle
import pandas as pd
import numpy as np
from datetime import datetime
import os
from graph_utils import build_process_tree

# ---- CLI and input loading ----
cli = argparse.ArgumentParser()
cli.add_argument('--in', dest='infile', required=True)
cli.add_argument('--out', dest='outfile', default='dataset/features.parquet')
args = cli.parse_args()

# NOTE(review): pickle.load executes arbitrary code from the file — only feed
# this script session dumps produced by a trusted pipeline.
with open(args.infile, 'rb') as f:
    sessions = pickle.load(f)

# ---- per-session feature extraction ----
# Event codes handled below (assumed from the raw event schema — TODO confirm):
#   1 = process create, 2 = write (carries a byte 'count'), 3 = process exit.
rows = []
graphs = []  # one process-tree graph per session, serialized by the output step
for s in sessions:
    evs = s['events']

    # Session duration from the ISO-8601 start/end timestamps.
    start = datetime.fromisoformat(s['start'])
    end = datetime.fromisoformat(s['end'])
    duration = (end - start).total_seconds()

    # Single pass over the events to accumulate the tabular counters.
    pids = set()
    commands = {}  # comm -> occurrence count
    create_count = 0
    exit_count = 0
    write_bytes = 0
    net_conns = 0
    for e in evs:
        if 'pid' in e:
            pids.add(int(e['pid']))
        event = e.get('event')
        if event == 1:
            create_count += 1
        elif event == 3:
            exit_count += 1
        elif event == 2 and e.get('count'):
            # e.get('count') is falsy for missing, None, or 0 — same filter as
            # the previous "'count' in e and e['count']" double-check.
            write_bytes += int(e['count'])
        if e.get('dport'):
            net_conns += 1
        comm = e.get('comm')
        if comm:
            commands[comm] = commands.get(comm, 0) + 1
    unique_cmds = len(commands)
    # Most frequent command; ties resolve to the first-seen comm, matching the
    # stable-sort behavior of the previous sorted()-based selection.
    top_cmd = max(commands, key=commands.get) if commands else ''

    # Build the process-tree graph and derive structural features.
    G = build_process_tree(evs)
    graphs.append(G)  # retained so the graphs can be serialized (module docstring)
    nodes = G.number_of_nodes()
    edges = G.number_of_edges()
    # Max depth = number of BFS levels starting from the root processes
    # (in-degree 0).  BUG FIX: the previous code called nx.shortest_path
    # without ever importing networkx; the NameError was swallowed by the
    # except clause, so graph_maxdepth was silently always 0.  Using G's own
    # API avoids the missing import.  Depth stays best-effort: any failure
    # (e.g. build_process_tree returning an undirected graph with no
    # in_degree/successors) falls back to 0 as before.
    max_depth = 0
    try:
        frontier = [n for n in G.nodes() if G.in_degree(n) == 0]
        seen = set(frontier)
        depth = 0
        while frontier:
            depth += 1
            max_depth = depth
            nxt = []
            for parent in frontier:
                for child in G.successors(parent):
                    if child not in seen:  # guard against cycles/reparenting
                        seen.add(child)
                        nxt.append(child)
            frontier = nxt
    except Exception:
        max_depth = 0

    row = {
        'start': s['start'],
        'end': s['end'],
        'duration': duration,
        'num_pids': len(pids),
        'create_count': create_count,
        'exit_count': exit_count,
        'write_bytes': write_bytes,
        'net_conns': net_conns,
        'unique_cmds': unique_cmds,
        'top_cmd': top_cmd,
        'graph_nodes': nodes,
        'graph_edges': edges,
        'graph_maxdepth': max_depth
    }
    rows.append(row)

# ---- persist outputs ----
# BUG FIX: os.path.dirname returns '' when the output path has no directory
# component (e.g. --out features.parquet), and os.makedirs('') raises
# FileNotFoundError — only create the directory when there is one.
out_dir = os.path.dirname(args.outfile)
if out_dir:
    os.makedirs(out_dir, exist_ok=True)

df = pd.DataFrame(rows)
df.to_parquet(args.outfile, index=False)
print('wrote features to', args.outfile)

# Serialize the per-session graphs next to the feature file, as promised by
# the module docstring (the graphs list previously was never written out).
graphs_path = os.path.splitext(args.outfile)[0] + '_graphs.pkl'
with open(graphs_path, 'wb') as f:
    pickle.dump(graphs, f)
print('wrote graphs to', graphs_path)