import logging
import time 

from openai import OpenAI, APITimeoutError, APIConnectionError, RateLimitError, APIStatusError
from config import *
from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_exception_type

logger = logging.getLogger(__name__)

# English prompt for merging a detailed new entry into an accumulated summary.
# Placeholders filled via .format(): {new_content}, {current_content}, {n_children}.
# Fix: "(n_children)" on the second paragraph was literal text (no braces), so the
# rendered prompt showed the raw token instead of the count — the Chinese template
# interpolates {n_children} in the same sentence, confirming the intended behavior.
AGGREGATION_PROMPT_TEMPLATE_EN = \
    """
    You must answer in English.
    
    You will receive two pieces of information: New Information is detailed, and Existing Information is a summary from {n_children} previous entries.
    Your task is to merge these into a single, cohesive summary that highlights the most important insights.
    Focus on the key points from both inputs.
    
    Ensure the final summary combines the insights from both pieces of information.
    If the number of previous entries in Existing Information is accumulating (more than 2), focus on summarizing more concisely, only capturing the overarching theme, and getting more abstract in your summary.
    Output the summary directly.
    
    [New Information]
    {new_content}
    
    [Existing Information (from {n_children} previous entries)]
    {current_content}
    
    [Output Summary]
    """

# Chinese-language variant of the aggregation (merge-summarize) prompt.
# Placeholders filled via .format(): {new_content}, {current_content}, {n_children}.
# The string body is runtime model input and is kept byte-identical.
AGGREGATION_PROMPT_TEMPLATE_ZH = \
    """
    你将会收到两条信息：“新信息”是详细的，“现有信息”是一个来自({n_children})个先前条目的摘要。
    你的任务是将这两条信息合并成一个单一、连贯的摘要，并突出最重要的见解。
    请关注两条输入的要点。
    
    确保最终的摘要结合了来自两条信息的见解。
    如果“现有信息”中先前条目的数量正在累积（超过2个），请专注于更简洁地进行总结，只捕捉首要的主题，并在摘要中变得更抽象。
    请直接输出摘要。
    
    [新信息]
    {new_content}
    
    [现有信息 (来自 {n_children} 个先前条目)]
    {current_content}
    
    [输出摘要]
    """

# QUERY_INSTRUCTION = "Represent this sentence for searching relevant passages: "

# Transient OpenAI API failures worth retrying; passed to tenacity's
# retry_if_exception_type in the decorators below. APIStatusError is
# deliberately excluded — non-rate-limit HTTP errors are not retried.
RETRYABLE_EXCEPTIONS = (APITimeoutError, APIConnectionError, RateLimitError)

# Client for the chat-completion (aggregation/summarization) model.
# Endpoint and credentials come from config (imported via `from config import *`).
client = OpenAI(
    base_url=AGGREGATION_MODEL_API_BASE,
    api_key=AGGREGATION_MODEL_API_KEY
)

# Separate client for the embedding endpoint, which may point at a
# different provider/base URL than the aggregation model.
embedding_client = OpenAI(
    base_url=EMBEDDING_API_BASE,
    api_key=EMBEDDING_API_KEY
)


@retry(
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, min=1, max=10),
    retry=retry_if_exception_type(RETRYABLE_EXCEPTIONS)
)
def invoke_embedding(texts: list[str]) -> list[list[float]]:
    """Embed a batch of texts with the configured embedding model.

    Newlines are flattened to spaces before the API call, since embedding
    endpoints commonly treat each input as a single passage.

    Fix: the @retry decorator was applied twice with identical settings,
    nesting the retry loops — up to 3 x 3 = 9 attempts with compounded
    backoff instead of the intended 3. The duplicate has been removed.

    Args:
        texts: Texts to embed; one embedding vector is returned per text,
            in the same order.

    Returns:
        A list of embedding vectors (one list of floats per input text).

    Raises:
        APITimeoutError / APIConnectionError / RateLimitError: re-raised
            after 3 failed attempts (exponential backoff, 1-10 s).
    """
    start_time = time.time()

    # Flatten newlines so each text is sent as one continuous passage.
    cleaned_texts: list[str] = [text.replace("\n", " ") for text in texts]
    response = embedding_client.embeddings.create(input=cleaned_texts, model=EMBEDDING_MODEL_NAME)

    duration = time.time() - start_time
    logger.info(f"嵌入任务在 {duration:.2f} 秒内完成。")

    return [data.embedding for data in response.data]


@retry(
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, min=1, max=10),
    retry=retry_if_exception_type(RETRYABLE_EXCEPTIONS)
)
def invoke_aggregation(
        new_content: str,
        current_content: str,
        n_children: int,
        use_chinese_prompt: bool = True
) -> str:
    """Merge a detailed new entry with an existing summary via the chat model.

    Args:
        new_content: The detailed new information to fold in.
        current_content: The existing summary being extended.
        n_children: How many previous entries the existing summary covers;
            interpolated into the prompt so the model can become more
            abstract as the count grows.
        use_chinese_prompt: Select the Chinese prompt template (default)
            or the English one.

    Returns:
        The model's merged summary, whitespace-stripped.

    Raises:
        APITimeoutError / APIConnectionError / RateLimitError: re-raised
            after 3 failed attempts (exponential backoff, 1-10 s).
    """
    started = time.time()
    logger.info("Invoking aggregation (summarization)...")

    # Pick the language-appropriate template.
    if use_chinese_prompt:
        template = AGGREGATION_PROMPT_TEMPLATE_ZH
    else:
        template = AGGREGATION_PROMPT_TEMPLATE_EN

    prompt = template.format(
        n_children=n_children,
        new_content=new_content,
        current_content=current_content,
    )

    # Deterministic (temperature 0) single-turn completion, capped at 500 tokens.
    messages = [{"role": "user", "content": prompt}]
    response = client.chat.completions.create(
        model=AGGREGATION_MODEL_NAME,
        messages=messages,
        temperature=0.0,
        max_tokens=500,
    )
    summary = response.choices[0].message.content.strip()

    duration = time.time() - started
    logger.info(f"Aggregation completed in {duration:.2f} seconds.")

    return summary