use std::io::Write;
use anyhow::Result;
use llm_sdk::chat::ChatMessage;
use edagent::llm::chat::ChatLLM;

/// Interactive chat REPL against the configured LLM.
///
/// Reads one line of user input per turn, sends the full accumulated
/// conversation to the model, and prints the assistant's reply.
/// Exit with `quit` or by closing stdin (Ctrl-D / Ctrl-Z).
///
/// # Errors
/// Propagates config-loading, stdio, and LLM request errors via `anyhow`.
#[tokio::main]
async fn main() -> Result<()> {
    let llm = ChatLLM::load_config("./config/config.toml")?;

    // Seed the history with a system prompt; the whole history is resent
    // each turn so the model keeps conversational context.
    let mut messages = vec![
        ChatMessage::system("You are a helpful assistant.")
    ];

    loop {
        print!("User: ");
        // `print!` does not flush — force the prompt out before blocking on stdin.
        std::io::stdout().flush()?;

        let mut user_message = String::new();
        // `read_line` returns Ok(0) at EOF (e.g. Ctrl-D). Without this check
        // the loop would spin forever re-printing the prompt on empty reads.
        if std::io::stdin().read_line(&mut user_message)? == 0 {
            break;
        }

        // Trim the trailing newline so the quit check also matches "\r\n"
        // (Windows) and the model doesn't receive stray whitespace.
        let trimmed = user_message.trim_end();
        if trimmed == "quit" {
            break;
        }

        messages.push(ChatMessage::user(trimmed.to_string()));
        let response = llm.ask_messages(messages.clone()).await?;
        // content may be absent (e.g. a reply with no text); print empty then.
        println!("Ai:  {}", response.content.as_deref().unwrap_or(""));

        // Keep the assistant turn in the history for the next request.
        messages.push(ChatMessage::Assistant(response));
    }

    Ok(())
}