from pathlib import Path
from loguru import logger

from src.models.article import Article
from src.services.llm_client import send_async_requests
from src.utils import prompts


def summarize_articles(
    articles: list[Article], model: str = "openai/gpt-4.1", output_dir: Path | None = None
) -> None:
    """Generate a consolidated Markdown summary report for the given articles.

    Articles whose ``ai_summary`` is None are skipped. The remaining summaries
    are formatted into one user prompt, sent to the LLM in a single request,
    and the response is written to ``output_dir / "report.md"`` when an output
    directory is provided.

    Args:
        articles: Candidate articles; only those with a non-None ``ai_summary``
            are included in the report.
        model: Model identifier forwarded to ``send_async_requests``.
        output_dir: Destination directory for ``report.md``. Created (including
            parents) if missing. When None, the generated report is not saved.
    """
    sections: list[str] = []
    valid_num = 0
    for article in articles:
        if article.ai_summary is None:
            continue
        # One Markdown section per article, separated by horizontal rules.
        sections.append(
            "\n---\n"
            f"**Title:** {article.title}\n"
            f"**Authors:** {', '.join(article.authors)}\n"
            f"**Summary:**\n{article.ai_summary}\n"
        )
        valid_num += 1

    text_msg = prompts.user_prompt_summary.format(num_articles=valid_num) + "".join(sections)

    logger.info(f"Generating summary report for {valid_num} articles...")
    messages = [
        {"role": "system", "content": prompts.system_prompt_md},
        {"role": "user", "content": text_msg},
    ]

    responses = send_async_requests([messages], model=model)
    if len(responses) != 1 or responses[0] is None:
        logger.error("Failed to generate summary report")
        return
    if output_dir is not None:
        # parents=True: a missing intermediate directory must not abort the save.
        output_dir.mkdir(parents=True, exist_ok=True)
        (output_dir / "report.md").write_text(responses[0], encoding="utf-8")
