#!/usr/bin/env python3
import argparse
import os
import re
import subprocess
import sys
from datetime import datetime, timedelta
from collections import defaultdict
import csv
# One-time PostgreSQL setup required by this analyzer (run via psql as superuser,
# then reload the configuration):
#ALTER SYSTEM SET logging_collector = 'on';
#ALTER SYSTEM SET log_destination = 'csvlog';
#ALTER SYSTEM SET log_min_duration_statement = 0;
#ALTER SYSTEM SET log_line_prefix = '%m,%u,%d,%p,%h,%r,%i,%e,%c,%l,%s,%v,%x,%q,%a,%n,%';
#SELECT pg_reload_conf();
def parse_args():
    """Build the command-line interface and return the parsed options.

    Defaults: --start is today at midnight, --end is "now", --top is 100.
    """
    p = argparse.ArgumentParser(description="PostgreSQL SQL Execution Log Analyzer")
    p.add_argument("--log", help="PostgreSQL log file path (auto-detect if not set)")
    # Time-window defaults are computed at call time, not import time.
    p.add_argument("--start",
                   default=datetime.now().strftime("%Y-%m-%d 00:00:00"),
                   help="Start time (YYYY-MM-DD HH:MM:SS)")
    p.add_argument("--end",
                   default=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                   help="End time (YYYY-MM-DD HH:MM:SS)")
    p.add_argument("--top", type=int, default=100, help="Number of Top SQLs to show")
    p.add_argument("--detail", action="store_true", help="Show detailed executions for Top SQLs")
    p.add_argument("--output", help="Export result to CSV")
    p.add_argument("--debug", action="store_true", help="Enable debug output")
    return p.parse_args()

def run_psql_cmd(cmd):
    """Run a SQL command through ``psql -Atc`` and return its trimmed stdout.

    On a psql failure the error message is printed and the whole process
    exits with status 1 (this script is not usable without a reachable DB).
    """
    argv = ["psql", "-Atc", cmd]
    try:
        proc = subprocess.run(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
    except subprocess.CalledProcessError as e:
        print(f"Error running psql command: {cmd}\n{e.stderr.decode()}")
        sys.exit(1)
    return proc.stdout.decode().strip()

def get_logfile_path():
    """Auto-detect the current PostgreSQL log file by asking the server.

    NOTE(review): pg_current_logfile() may already return an absolute path;
    os.path.join then discards data_directory, which is the desired outcome.
    """
    directory = run_psql_cmd("SHOW data_directory;")
    current_log = run_psql_cmd("SELECT pg_current_logfile();")
    return os.path.join(directory, current_log)

def detect_log_mode(logfile):
    """Return 'csv' for csvlog-format files, 'log' for plain stderr logs.

    Bug fix: PostgreSQL csvlog files normally carry NO header row, so the
    previous header-only check misclassified genuine csvlogs as 'log'.
    The server names csvlog files with a .csv extension, so that is checked
    first; a 'log_time' header line is still honored for exported files.
    """
    if logfile.endswith('.csv'):
        return 'csv'
    with open(logfile, 'r', encoding='utf-8') as f:
        first_line = f.readline()
    return 'csv' if first_line.startswith('log_time') else 'log'

# Message payload produced by log_min_duration_statement,
# e.g. "duration: 1.234 ms  statement: SELECT ...".
_CSV_MSG_RE = re.compile(r'duration: ([\d\.]+) ms\s+statement: (.*)')
# Plain stderr-log line: a millisecond-precision timestamp (%m prefix),
# then the duration message anywhere after it.
_STDERR_RE = re.compile(
    r'^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d+) .*duration: ([\d\.]+) ms\s+statement: (.*)')

def parse_log_line(line, mode):
    """Parse one log line into a record dict, or None for non-duration lines.

    line: raw text line from the log file.
    mode: 'csv' (csvlog columns) or 'log' (plain stderr format).
    Returns a dict with keys timestamp/user/db/pid/host/port/app/sql/duration
    (duration in ms as float), or None when the line is not a statement
    duration entry or cannot be parsed.
    """
    if mode == 'csv':
        try:
            row = next(csv.reader([line]))
            # The duration message is expected in the last field.
            # NOTE(review): column positions below assume the layout produced
            # by the log_line_prefix documented at the top of this file
            # (index 15 = application_name) — verify against the server config.
            m = _CSV_MSG_RE.search(row[-1])
            if m:
                return {
                    'timestamp': row[0],
                    'user': row[1],
                    'db': row[2],
                    'pid': row[3],
                    'host': row[4],
                    'port': row[5],
                    'app': row[15],
                    'sql': m.group(2).strip(),
                    'duration': float(m.group(1)),
                }
        except (csv.Error, StopIteration, IndexError):
            # Best-effort parser: malformed/short rows are skipped, but only
            # for the failures a bad row can actually produce (the previous
            # blanket `except Exception` could hide real bugs).
            return None
    else:
        m = _STDERR_RE.search(line)
        if m:
            return {
                'timestamp': m.group(1),
                'sql': m.group(3).strip(),
                'duration': float(m.group(2)),
                'user': '',
                'db': '',
                'pid': '',
                'host': '',
                'port': '',
                'app': ''
            }
    return None

def format_bins(durations):
    """Split durations (ms) into 10 equal-width histogram bins.

    Returns a list of 10 strings "lower_bound:count:sum" where lower_bound
    is the bin's start in ms, count the number of samples in the bin and
    sum their total duration. An empty input yields 10 empty bins.
    """
    if not durations:
        # Bug fix: format the empty sum as "0.00" so it matches the ":.2f"
        # formatting of the populated branch (was the inconsistent "0.0").
        return ["0.00ms:0:0.00"] * 10
    lo = min(durations)
    hi = max(durations)
    # Degenerate case (all values equal): any positive step keeps idx at 0.
    step = (hi - lo) / 10 if hi != lo else 1
    counts = [0] * 10
    sums = [0.0] * 10
    for d in durations:
        # Clamp to 9 so d == hi (and float rounding) lands in the last bin.
        idx = min(int((d - lo) // step), 9)
        counts[idx] += 1
        sums[idx] += d
    return [f"{lo + step * i:.2f}ms:{counts[i]}:{sums[i]:.2f}" for i in range(10)]

def analyze_logs(args):
    """Scan the PostgreSQL log, aggregate statement durations and report.

    args: argparse.Namespace from parse_args() (log/start/end/top/detail/
    output/debug). Prints a summary table of the Top-N SQLs by total
    duration; optionally exports it to CSV and prints per-execution detail.
    """
    logfile = args.log or get_logfile_path()
    mode = detect_log_mode(logfile)
    if args.debug:
        print(f"[DEBUG] Log file: {logfile}")
        print(f"[DEBUG] Log mode: {mode}")

    start_dt = datetime.strptime(args.start, "%Y-%m-%d %H:%M:%S")
    end_dt = datetime.strptime(args.end, "%Y-%m-%d %H:%M:%S")

    # Group every execution record by its (exact) SQL text.
    sql_data = defaultdict(list)
    with open(logfile, 'r', encoding='utf-8') as f:
        for line in f:
            record = parse_log_line(line, mode)
            if not record:
                continue
            try:
                # Drop fractional seconds and anything after them (e.g. a
                # trailing timezone) before parsing.
                log_time = datetime.strptime(record['timestamp'].split('.')[0],
                                             "%Y-%m-%d %H:%M:%S")
            except ValueError:
                # Bug fix: was a bare `except:` that silenced every error
                # (including KeyboardInterrupt); only an unparsable
                # timestamp should skip the record.
                continue
            if not (start_dt <= log_time <= end_dt):
                continue
            sql_data[record['sql']].append(record)

    if not sql_data:
        print("No SQL execution records matched.")
        return

    # Rank by total duration ONCE and reuse for the summary, the CSV export
    # and the detail view (the original sorted twice).
    top_sqls = sorted(sql_data.items(),
                      key=lambda kv: sum(e['duration'] for e in kv[1]),
                      reverse=True)[:args.top]

    summary = []
    for idx, (sql, entries) in enumerate(top_sqls, start=1):
        durations = [e['duration'] for e in entries]
        total = sum(durations)
        count = len(durations)
        summary.append([
            str(idx),
            f"{total:.2f}",
            str(count),
            f"{total / count:.2f}",
            f"{min(durations):.2f}",
            f"{max(durations):.2f}",
            " | ".join(format_bins(durations)),
            sql.replace('\n', ' ')[:100],  # one-line, truncated for the table
        ])

    # Display summary
    print("No   Total   Count   Avg   Min   Max   Bins Summary   SQL (truncated)")
    for row in summary:
        print(" ".join(row))

    # Write CSV if needed
    if args.output:
        with open(args.output, 'w', newline='', encoding='utf-8') as csvfile:
            writer = csv.writer(csvfile)
            writer.writerow(["No", "Total", "Count", "Avg", "Min", "Max", "Bins Summary", "SQL"])
            writer.writerows(summary)

    # Show details
    if args.detail:
        print("\n=== Detailed SQL Execution Info ===")
        for sql, entries in top_sqls:
            print(f"\nSQL: {sql}\n{'-' * 80}")
            for e in entries:
                info = (f"{e['timestamp']} duration={e['duration']:.2f}ms "
                        f"user={e['user']} host={e['host']} port={e['port']} app={e['app']}")
                print(info)

if __name__ == "__main__":
    # CLI entry point: parse the options, then run the analysis.
    analyze_logs(parse_args())
