import argparse
import json
import os
from typing import Any, Dict, Optional

from llm_client import LLMClient
from system_prompts import get_system_prompt
from user_prompts import get_user_prompt


def parse_args(argv: Optional[list] = None) -> argparse.Namespace:
    """Parse command-line options for the sample chat script.

    Args:
        argv: Optional explicit argument list (useful for testing); when
            ``None``, argparse falls back to ``sys.argv[1:]`` as before.

    Returns:
        The parsed namespace with attributes ``config``, ``base_url``,
        ``model``, ``temperature``, ``top_p``, ``presence_penalty`` and
        ``frequency_penalty``.
    """
    parser = argparse.ArgumentParser(
        description="Run a sample chat request against an OpenAI-compatible gateway."
    )
    parser.add_argument(
        "--config", default="llm/config.json", help="Path to JSON config file."
    )
    parser.add_argument(
        "--base-url",
        default=None,
        help="Override base URL; otherwise resolved from config by model.",
    )
    parser.add_argument(
        "--model",
        default=None,
        help="Model name to use; defaults to config default_model.",
    )
    parser.add_argument(
        "--temperature",
        type=float,
        default=None,
        help="Sampling temperature; falls back to config.",
    )
    # For compatibility across gateway implementations, the optional
    # parameters below are not sent unless explicitly provided.
    parser.add_argument("--top-p", type=float, default=None)
    parser.add_argument("--presence-penalty", type=float, default=None)
    parser.add_argument("--frequency-penalty", type=float, default=None)
    return parser.parse_args(argv)


def _load_config(path: str) -> Dict[str, Any]:
    if not path:
        return {}
    if not os.path.exists(path):
        return {}
    try:
        with open(path, "r", encoding="utf-8") as f:
            data = json.load(f)
            return data if isinstance(data, dict) else {}
    except Exception:
        return {}


def _ensure_http_scheme(url: Optional[str]) -> Optional[str]:
    if not url:
        return url
    if url.startswith("http://") or url.startswith("https://"):
        return url
    return f"http://{url}"


def _resolve_settings(
    args: argparse.Namespace, config: Dict[str, Any]
) -> Dict[str, Any]:
    """Merge CLI args, config file values, env vars, and built-in defaults.

    Precedence per field:
      * model:       CLI > config.default_model > $LLM_MODEL > "qwen2.5-72b"
      * base_url:    CLI > config.models[model].base_url > $LLM_BASE_URL
                     > built-in default (scheme added when missing)
      * temperature: CLI > config.defaults.temperature > 0.5
    The optional sampling parameters come straight from config.defaults and
    may be ``None`` (meaning "do not send").
    """
    models_cfg = config.get("models") or {}
    defaults_cfg = config.get("defaults") or {}

    model = (
        args.model
        or config.get("default_model")
        or os.getenv("LLM_MODEL")
        or "qwen2.5-72b"
    )

    model_cfg = models_cfg.get(model) or {}
    base_url = _ensure_http_scheme(
        args.base_url
        or model_cfg.get("base_url")
        or os.getenv("LLM_BASE_URL")
        or "http://156.22.30.238:1244"
    )

    if args.temperature is not None:
        temperature = args.temperature
    else:
        temperature = defaults_cfg.get("temperature", 0.5)

    return {
        "model": model,
        "base_url": base_url,
        "temperature": temperature,
        # Optional params: None means "omit from the request".
        "top_p": defaults_cfg.get("top_p"),
        "presence_penalty": defaults_cfg.get("presence_penalty"),
        "frequency_penalty": defaults_cfg.get("frequency_penalty"),
    }


def main() -> None:
    """Entry point: resolve settings, send one chat request, print the reply."""
    args = parse_args()

    config = _load_config(args.config)
    settings = _resolve_settings(args, config)

    client = LLMClient(base_url=settings["base_url"], default_model=settings["model"])

    system_prompt = get_system_prompt("jjjc_summarizer")
    user_prompt = get_user_prompt("case_1")

    # Per-call kwargs with precedence CLI > config.defaults.
    # settings["temperature"] already encodes that precedence (see
    # _resolve_settings), so no second CLI check is needed here.
    call_kwargs: Dict[str, Any] = {"temperature": settings["temperature"]}

    # Optional params are only sent when explicitly given on the CLI or set
    # in config.defaults, to stay compatible with gateways that reject them.
    for name in ("top_p", "presence_penalty", "frequency_penalty"):
        value = getattr(args, name)
        if value is None:
            value = settings.get(name)
        if value is not None:
            call_kwargs[name] = value

    result = client.chat(
        system_prompt=system_prompt,
        user_prompt=user_prompt,
        **call_kwargs,
    )

    print("==== Model Output ====")
    print(result.text)


if __name__ == "__main__":
    main()
