import argparse
import json
import os
import sqlite3
from typing import Dict, Optional, Tuple

from cold_start_analyzer import callstack_helper, db_helper, phase_helper, util
from cold_start_analyzer.analyzer_factory import AnalyzerFactory
from cold_start_analyzer.comparison import compare_phase_breakdown, compare_table_items
from cold_start_analyzer.comparison_report_generator import ComparisonReportGenerator
from cold_start_analyzer.report_generator import ReportGenerator


def _split_results(result: list) -> Tuple[Optional[Dict], Optional[list]]:
    if not result:
        return None, None
    entry = result[0]
    subs = entry.get("subs", [])
    so_data = None
    phase_subs = []
    for item in subs:
        if item.get("name", "").startswith("SO加载"):
            so_data = item.get("cost")
        else:
            phase_subs.append(item)
    phase_data = None
    if phase_subs:
        phase_data = {
            "name": entry.get("name"),
            "cost": entry.get("cost"),
            "subs": phase_subs,
        }
    return phase_data, so_data


def analyze_trace(
    file_path: str,
    thread_name: Optional[str],
    trace_streamer_dir: Optional[str],
    mode: str,
    phase_depth: int,
    stage_threshold: float,
    stack_depth: int,
    stack_limit: int,
    hotspot_count: int,
) -> Dict[str, object]:
    """Analyze a single trace file and return the result payload.

    Converts the ftrace file to a SQLite database, runs the configured
    analyzers against the (auto-detected or given) thread, then assembles a
    payload containing metadata plus, depending on *mode*, the phase
    breakdown, SO-load data, and call-stack tables.

    Args:
        file_path: Path to the trace file to analyze. Required.
        thread_name: Target thread name; when falsy the main thread is
            auto-detected from the database.
        trace_streamer_dir: Directory with trace_streamer executables, or
            ``None`` to use the default/env lookup inside ``util``.
        mode: One of ``all``/``phases``/``so``/``compare``; selects which
            sections are included in the payload.
        phase_depth: Maximum depth for the phase-breakdown tree.
        stage_threshold: Minimum ratio (0-1) for call-stack rows to be kept.
        stack_depth: Maximum call-stack table depth (clamped to >= 1).
        stack_limit: Maximum call-stack table rows (clamped to >= 1).
        hotspot_count: Number of hotspot functions (clamped to >= 0).

    Returns:
        A dict with a ``metadata`` section and any generated analysis
        sections.

    Raises:
        ValueError: If *file_path* is empty.
        FileNotFoundError: If the SQLite database could not be generated.
        RuntimeError: If no analysis section could be produced.
    """
    if not file_path:
        raise ValueError("file_path is required")
    thread_name = thread_name or ""
    connection = None
    try:
        db_file = util.convert_ftrace_to_sqlite(file_path, trace_streamer_dir)
        if not db_file or not os.path.exists(db_file):
            raise FileNotFoundError(f"无法生成数据库文件: {db_file}")
        connection = sqlite3.connect(db_file)
        cursor = connection.cursor()
        # Fall back to auto-detection only when no thread name was supplied;
        # the message below therefore only fires for the auto-detected case.
        detected_thread = thread_name or db_helper.detect_main_thread(cursor)
        if detected_thread and detected_thread != thread_name:
            print(f"自动检测到主线程: {detected_thread}")
            thread_name = detected_thread
        thread_start, thread_end = db_helper.get_thread_time_bounds(cursor, thread_name)
        factory = AnalyzerFactory(cursor, os.path.join(util.CURRENT_DIR, "config.ini"), thread_name)
        result = factory.analyze()
        phase_data, so_data = _split_results(result)
        payload: Dict[str, object] = {
            "metadata": {
                "file": os.path.abspath(file_path),
                "thread": thread_name,
                "mode": mode,
                "start_ts": thread_start,
                "end_ts": thread_end,
            }
        }
        include_phases = mode in ("all", "phases", "compare")
        include_so = mode in ("all", "so", "compare")
        if include_phases and phase_data:
            payload["phase_analysis"] = phase_data
            # +1 because the root entry itself occupies one level of the tree.
            breakdown_depth = max(1, phase_depth + 1)
            truncated = phase_helper.truncate_phase_tree(phase_data, breakdown_depth)
            if truncated:
                payload["phase_breakdown"] = truncated
        entry_cost = None
        if phase_data:
            entry_cost = phase_data.get("cost")
        # Narrow the call-stack window to the cold-start span when the total
        # cost is known: start from (thread_end - cost), clamped to the
        # thread's own start when that is available.
        effective_start = thread_start
        effective_end = thread_end
        if isinstance(entry_cost, (int, float)) and thread_end is not None:
            potential_start = thread_end - entry_cost
            if thread_start is not None:
                effective_start = max(potential_start, thread_start)
            else:
                effective_start = potential_start
        if include_so and so_data is not None:
            payload["so_analysis"] = so_data
        stack_rows, bounds, hotspots_wall, hotspots_self = callstack_helper.build_callstack_table(
            cursor,
            thread_name,
            effective_start,
            effective_end,
            depth=max(1, stack_depth),
            max_rows=max(1, stack_limit),
            hotspot_count=max(0, hotspot_count),
            min_ratio=stage_threshold,
            total_duration_override=entry_cost,
        )
        if stack_rows:
            payload["callstack_table"] = stack_rows
            payload["callstack_range"] = {
                "start": bounds[0],
                "end": bounds[1],
            }
        if hotspots_wall:
            payload["callstack_hotspots"] = hotspots_wall
        if hotspots_self:
            payload["callstack_hotspots_self"] = hotspots_self
        # Only "metadata" present means nothing useful was generated.
        if len(payload) <= 1:
            raise RuntimeError("未能生成任何有效结果，请检查 trace 或参数。")
        return payload
    finally:
        # Close the SQLite connection even when analysis fails mid-way.
        if connection:
            connection.close()


def entry(
    file_path: str,
    thread_name: Optional[str],
    output_path: Optional[str] = None,
    trace_streamer_dir: Optional[str] = None,
    mode: str = "all",
    phase_depth: int = 5,
    stage_threshold: float = 0.05,
    stack_depth: int = 4,
    stack_limit: int = 400,
    hotspot_count: int = 15,
) -> None:
    """Analyze one trace and write result.json plus the generated report.

    The analysis payload is produced by :func:`analyze_trace`, serialized to
    ``result.json`` inside *output_path* (default: ``./output``), and then
    rendered by :class:`ReportGenerator`.
    """
    target_dir = output_path or os.path.join(os.getcwd(), "output")
    os.makedirs(target_dir, exist_ok=True)
    analysis = analyze_trace(
        file_path,
        thread_name,
        trace_streamer_dir,
        mode,
        phase_depth,
        stage_threshold,
        stack_depth,
        stack_limit,
        hotspot_count,
    )
    json_path = os.path.join(target_dir, "result.json")
    with open(json_path, "w", encoding="utf-8") as handle:
        json.dump(analysis, handle, indent=4, ensure_ascii=False)
    ReportGenerator(json_path, target_dir).generate()


def compare_entry(
    file_a: str,
    file_b: str,
    thread_a: Optional[str],
    thread_b: Optional[str],
    output_path: Optional[str],
    trace_streamer_dir: Optional[str],
    phase_depth: int,
    stage_threshold: float,
    stack_depth: int,
    stack_limit: int,
    hotspot_count: int,
) -> None:
    """Analyze two traces, diff them, and write the comparison report.

    Both traces are analyzed in "compare" mode with identical tuning
    parameters; the phase breakdowns, call-stack tables, and hotspot lists
    are diffed, and the combined result is serialized to
    ``comparison_result.json`` before the HTML comparison report is rendered.
    """
    target_dir = output_path or os.path.join(os.getcwd(), "output_compare")
    os.makedirs(target_dir, exist_ok=True)

    def _run(path: str, thread: Optional[str]) -> Dict[str, object]:
        # Both sides share the same mode and tuning knobs.
        return analyze_trace(
            path,
            thread,
            trace_streamer_dir,
            "compare",
            phase_depth,
            stage_threshold,
            stack_depth,
            stack_limit,
            hotspot_count,
        )

    payload_a = _run(file_a, thread_a)
    payload_b = _run(file_b, thread_b)
    comparison = compare_phase_breakdown(
        payload_a.get("phase_breakdown"),
        payload_b.get("phase_breakdown"),
    )
    # Diff each table-shaped section under its corresponding output key.
    table_diffs = (
        ("callstack_table_diff", "callstack_table"),
        ("hotspots_wall_diff", "callstack_hotspots"),
        ("hotspots_self_diff", "callstack_hotspots_self"),
    )
    for diff_key, source_key in table_diffs:
        comparison[diff_key] = compare_table_items(
            payload_a.get(source_key),
            payload_b.get(source_key),
        )
    result = {
        "metadata": {
            "mode": "compare",
            "file_a": payload_a["metadata"]["file"],
            "file_b": payload_b["metadata"]["file"],
            "thread_a": payload_a["metadata"]["thread"],
            "thread_b": payload_b["metadata"]["thread"],
        },
        "trace_a": payload_a,
        "trace_b": payload_b,
        "comparison": comparison,
    }
    json_path = os.path.join(target_dir, "comparison_result.json")
    with open(json_path, "w", encoding="utf-8") as handle:
        json.dump(result, handle, indent=4, ensure_ascii=False)
    ComparisonReportGenerator(json_path, target_dir).generate()


def main() -> None:
    """Command-line entry point: parse arguments and dispatch the analysis.

    Runs a two-trace comparison when ``--compare`` is given, otherwise a
    single-trace analysis.
    """
    cli = argparse.ArgumentParser(description="Analyze cold start trace.")
    cli.add_argument("-f", "--file", required=True, help="Path to the trace file.")
    cli.add_argument("-o", "--output", help="Output directory for generated artifacts.")
    cli.add_argument("-t", "--thread", help="Target thread name (optional).")
    cli.add_argument("--phase-depth", type=int, default=5, help="阶段拆分最大深度，默认 5。")
    cli.add_argument(
        "--stage-threshold",
        type=float,
        default=0.05,
        help="阶段拆分中最小占比阈值 (0-1)，默认 0.05。",
    )
    cli.add_argument("--stack-depth", type=int, default=4, help="调用栈表格的最大层级深度，默认 4。")
    cli.add_argument("--stack-limit", type=int, default=400, help="调用栈表格最大行数，默认 400。")
    cli.add_argument("--hotspot-count", type=int, default=15, help="调用栈热点函数的数量，默认 15。")
    cli.add_argument("--compare", dest="compare_file", help="用于对比分析的另一个 trace 文件。")
    cli.add_argument("--thread-b", help="对比分析时第二个 trace 使用的线程名称。")
    cli.add_argument(
        "-s",
        "--trace-streamer",
        help="Directory containing trace_streamer executables (overrides TRACE_STREAMER_DIR env).",
    )
    cli.add_argument(
        "-m",
        "--mode",
        choices=("all", "phases", "so"),
        default="all",
        help="选择要生成的内容: all(默认)、phases(仅阶段拆解)、so(仅SO整理)。",
    )
    opts = cli.parse_args()
    # Tuning knobs shared by both the single-trace and comparison paths.
    tuning = (
        opts.phase_depth,
        opts.stage_threshold,
        opts.stack_depth,
        opts.stack_limit,
        opts.hotspot_count,
    )
    if opts.compare_file:
        compare_entry(
            opts.file,
            opts.compare_file,
            opts.thread,
            opts.thread_b,
            opts.output,
            opts.trace_streamer,
            *tuning,
        )
    else:
        entry(
            opts.file,
            opts.thread,
            opts.output,
            opts.trace_streamer,
            opts.mode,
            *tuning,
        )


if __name__ == "__main__":
    main()
