import base64
from pathlib import Path
from loguru import logger

from src.models.article import Article
from src.utils import prompts
from src.services.llm_client import send_async_requests


def read_articles(articles: list[Article], model: str = "openai/gpt-4.1", output_dir: Path | None = None) -> None:
    """
    Use an LLM to read article PDFs and generate AI summaries.

    Articles without a downloaded PDF are skipped with a warning. Each
    remaining article's PDF is base64-encoded into a data URL and sent to
    the model alongside the reading prompt. Successful responses are stored
    on ``article.ai_summary`` and, if ``output_dir`` is given, written to
    disk via ``article.write_ai_summary``.

    Args:
        articles: List of Article objects to read.
        model: The model to use for reading and summarizing (default: "openai/gpt-4.1").
        output_dir: Directory to save the AI summaries. If None, summaries
            are only kept in memory on the Article objects.
    """
    all_messages = []
    # Articles that actually produced a request, in the same order as
    # all_messages, so responses can be matched back to the right article.
    requested_articles: list[Article] = []

    # File-parser plugin config: extract text from the attached PDF.
    plugins = [
        {
            "id": "file-parser",
            "pdf": {
                "engine": "pdf-text"
            }
        }
    ]

    for article in articles:
        if article.pdf_file is None:
            logger.warning(f"Article {article.id} does not have a PDF file downloaded")
            continue
        # Embed the PDF inline as a base64 data URL.
        with open(article.pdf_file, "rb") as pdf_file:
            pdf_base64 = base64.b64encode(pdf_file.read()).decode('utf-8')
        data_url = f"data:application/pdf;base64,{pdf_base64}"

        messages = [
            {"role": "system", "content": prompts.system_prompt_md},
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        "text": prompts.user_prompt_read
                    },
                    {
                        "type": "file",
                        "file": {
                            "filename": article.pdf_file.name,
                            "file_data": data_url
                        }
                    },
                ]
            }
        ]
        all_messages.append(messages)
        requested_articles.append(article)

    # Nothing to send — avoid an empty batch request.
    if not all_messages:
        logger.warning("No articles with downloaded PDFs; nothing to send to the model")
        return

    logger.info(f"Sending {len(all_messages)} articles to the model. Please wait...")
    responses = send_async_requests(all_messages, model=model, plugins=plugins, max_concurrent=5)

    valid_num = 0
    # BUG FIX: zip against requested_articles, not the original articles list.
    # Previously, any article skipped for a missing PDF shifted every
    # subsequent response onto the wrong article.
    for article, response in zip(requested_articles, responses):
        if response is None:
            continue
        article.ai_summary = response
        if output_dir is not None:
            article.write_ai_summary(Path(output_dir))
        valid_num += 1
    logger.info(f"Received {valid_num} responses from the model")
    if output_dir is not None:
        logger.info(f"AI summaries saved to {Path(output_dir).resolve()}")