mod kb;
mod messages_util;
pub mod openai;

pub use kb::{append_to_kb, get_kb_message, is_kb_duplicated, list_kb_number};
use messages_util::{
    create_assistant_message, create_user_message, get_latest_messages, load_messages,
    prepare_messages, save_messages, sort_messages,
};
use openai::chat;

use crate::models::{Message, ProjectConfig};
use anyhow::Result;
use std::fs;
use std::path::Path;

/// File the latest assistant answer is mirrored to in normal (non-KB) mode.
const OUTPUT_FILE: &str = "output.md";

/// Ask the model a question, optionally storing the exchange in the knowledge base.
///
/// Builds the prompt from `system_message`, the last `history_count` saved
/// messages, and `question` (both prompt and question may be rewritten by
/// `prepare_messages`, e.g. to splice in KB entry `kb_number`), then sends
/// it to the chat endpoint with the given `temperature`.
///
/// When `to_kb` is `None`, the new question/answer pair is appended to the
/// conversation history and the answer is also written to `output.md`.
/// When `to_kb` is `Some(..)`, the pair is appended to that knowledge base
/// instead and the in-memory history is left unmodified.
///
/// # Errors
/// Propagates failures from loading/saving messages, the chat call, writing
/// the output file, and appending to the knowledge base.
///
/// Returns the assistant's response text.
pub async fn handle_question(
    system_message: &Message,
    question: &str,
    history_count: usize,
    working_path: &Path,
    to_kb: &Option<&String>,
    kb_number: &Option<usize>,
    temperature: f32,
    project_config: &Option<ProjectConfig>,
) -> Result<String> {
    let mut messages = load_messages(working_path)?;

    // May rewrite both the system prompt and the question (KB injection).
    let (system_message, new_question) = prepare_messages(system_message, question, kb_number);
    println!("system message:\n{}", &system_message.content);
    let new_message = create_user_message(&new_question);

    // Prompt order: system message, recent history, then the new question.
    let response_message: String = chat(
        &[
            vec![system_message.clone()],
            get_latest_messages(&messages, history_count),
            vec![new_message.clone()],
        ]
        .concat()
        .iter()
        .map(From::from)
        .collect::<Vec<_>>(),
        temperature,
        project_config,
    )
    .await?;

    if to_kb.is_none() {
        // Normal Q&A: record the exchange in history and mirror the answer
        // to the output file. `new_message` is not used again, so move it
        // instead of cloning; `fs::write` takes any `AsRef<[u8]>`, so a
        // borrow avoids copying the whole response.
        messages.push(new_message);
        messages.push(create_assistant_message(&response_message));
        fs::write(OUTPUT_FILE, &response_message)?;
    } else {
        // KB mode: persist the pair to the knowledge base; the in-memory
        // history stays untouched.
        append_to_kb(to_kb, &new_question, &response_message)?;
    }

    // History is always re-saved in sorted order, even when unmodified.
    save_messages(&sort_messages(&messages))?;
    Ok(response_message)
}
