#!/usr/bin/env bash
set -euo pipefail

# Usage: scripts/run_local_go.sh [N=4] [DURATION=15]
#
# Launches N local consensus nodes in tmux, runs them for DURATION
# seconds, then parses their logs for a rough throughput estimate.

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ROOT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$ROOT_DIR"

N="${1:-4}"
DURATION="${2:-15}"

# Export the values the embedded python3 heredocs read via os.environ.
# These MUST be exported before the first heredoc runs: previously N_VAL
# was exported only after the committee generator had already fallen back
# to its default of 4, and DURATION_VAL was never exported at all (so the
# TPS estimate always divided by 1).
export N_VAL="$N"
export DURATION_VAL="$DURATION"

echo "[run_local_go] root: $ROOT_DIR, N=$N, duration=${DURATION}s"

# Output data root and timestamped run dir
DATA_ROOT="$ROOT_DIR/data"
TS=$(date +%Y%m%d_%H%M%S)
RUN_DIR="$DATA_ROOT/$TS"
export RUN_DIR TS
mkdir -p "$RUN_DIR/logs"
echo "[run_local_go] data dir: $RUN_DIR"

# Build Go binary
echo "[run_local_go] building Go binary..."
go build -o main ./main.go

# Generate node keys and threshold keys
echo "[run_local_go] generating keys..."
./main keys --path "$RUN_DIR" --nodes "$N"

# Threshold T = 2f + 1, where f = (N-1)/3 is the tolerated fault count.
faults=$(( (N - 1) / 3 ))
T=$(( 2 * faults + 1 ))
./main threshold_keys --path "$RUN_DIR" --N "$N" --T "$T"

# Generate committee.json using the public keys and local addresses 127.0.0.1:9000+i
python3 - <<'PY'
import json
import os

run_dir = os.environ['RUN_DIR']
n_nodes = int(os.environ.get('N_VAL', '4'))

def load_public_key(idx):
    """Return the 'public' field of node idx's key file under run_dir."""
    key_path = os.path.join(run_dir, f'.node-key-{idx}.json')
    with open(key_path, 'r') as fh:
        return json.load(fh)['public']

# One entry per node: public key name, id, and a local address on port 9000+i.
committee = {
    str(idx): {
        'name': load_public_key(idx),
        'node_id': idx,
        'addr': f'127.0.0.1:{9000 + idx}',
    }
    for idx in range(n_nodes)
}
with open(os.path.join(run_dir, '.committee.json'), 'w') as fh:
    json.dump(committee, fh, indent=2)

# parameters.json with integer protocol (SMVBA=1)
params = {
    'pool': {
        'tx_size': 250,
        'batch_size': 500,
        'max_queue_size': 10000,
    },
    'consensus': {
        'sync_timeout': 500,
        'network_delay': 2000,
        'min_block_delay': 0,
        'ddos': False,
        'faults': max(0, (n_nodes - 1) // 3),
        'retry_delay': 5000,
        'protocol': 1,
    },
}
with open(os.path.join(run_dir, '.parameters.json'), 'w') as fh:
    json.dump(params, fh, indent=2)
print('committee and parameters generated')
PY
# Env consumed by the log-parsing python3 heredoc below (via os.environ).
# DURATION_VAL was previously never exported, so the TPS estimate always
# fell back to dividing by 1 second.
export N_VAL="$N"
export RUN_DIR
export TS
export DURATION_VAL="$DURATION"

# Prepare logs and clean old db
rm -rf "$RUN_DIR"/db-*

echo "[run_local_go] starting $N node(s) in tmux..."
# One detached tmux session per node so a crash of one node does not
# take the others down; kill-server below stops them all at once.
for (( i=0; i< N; i++ )); do
  session="dumbo_$i"
  cmd="./main run --keys $RUN_DIR/.node-key-$i.json --threshold_keys $RUN_DIR/.node-ts-key-$i.json --committee $RUN_DIR/.committee.json --store $RUN_DIR/db-$i --parameters $RUN_DIR/.parameters.json --log_level 15 --log_out $RUN_DIR/logs --node_id $i"
  tmux new -d -s "$session" "$cmd"
done

echo "[run_local_go] running for ${DURATION}s..."
sleep "$DURATION"

echo "[run_local_go] stopping tmux server..."
tmux kill-server || true

echo "[run_local_go] logs:"
ls -la "$RUN_DIR/logs" || true

# Parse logs for simple throughput estimation
echo "[run_local_go] parsing logs for TPS..."
python3 - <<'PY'
import glob, json
from pathlib import Path

import os
RUN_DIR = os.environ['RUN_DIR']
logs = sorted(glob.glob(os.path.join(RUN_DIR,'logs/node-debug-*.log')))
if not logs:
    print('no debug logs found, skip stats')
else:
    # read batch size
    try:
        params = json.load(open(os.path.join(RUN_DIR,'.parameters.json')))
        batch_size = int(params['pool'].get('batch_size', 500))
    except Exception:
        batch_size = 500

    def count_outputs(path: str) -> int:
        cnt = 0
        with open(path, 'r', errors='ignore') as f:
            for line in f:
                # match decision marker
                if 'Processing Ouput epoch' in line:
                    cnt += 1
        return cnt

    per_node = {Path(p).stem: count_outputs(p) for p in logs}
    # estimate decided blocks as max across nodes (avoid double counting across replicas)
    decided_blocks = max(per_node.values()) if per_node else 0

    # duration from env passed down by the script call context is unknown here; fall back to 1 if missing
    try:
        duration = int(os.environ.get('DURATION_VAL','0'))
        if duration <= 0:
            duration = 1
    except Exception:
        duration = 1

    est_tps = (decided_blocks * batch_size) / duration
    print('--- STATS ---')
    print('per_node_blocks:', per_node)
    print('decided_blocks_est:', decided_blocks)
    print('batch_size:', batch_size)
    print('duration_s:', duration)
    print('consensus_TPS_est:', int(est_tps))
    print('---------------')
    
    # Write results to CSV
    import csv
    N = int(os.environ.get('N_VAL','4'))
    result_file = os.path.join(RUN_DIR, 'result.csv')
    with open(result_file, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(['timestamp', 'N', 'duration_s', 'batch_size', 'decided_blocks', 'consensus_TPS_est', 'per_node_blocks'])
        writer.writerow([os.environ.get('TS',''), N, duration, batch_size, decided_blocks, int(est_tps), str(per_node)])
    print(f'results written to: {result_file}')
PY

echo "[run_local_go] done."

