#![allow(non_snake_case)]
pub mod chatgml;
pub mod chatgpt;
pub mod prompt;
pub mod code2prompt;
pub mod splitter;
pub mod c2rust;

pub use c2rust::{TranscodeParams, TranscodePathParams};

#[macro_use]
pub mod utils;

use chatgml::{sse_invoke_method::sse_invoke::constant_value::API_KEY, C2RustGLM};
use chatgpt::chat_invoke;

/// Selects which large-language-model backend [`chat`] dispatches to.
#[derive(Debug, Clone, Copy)]
pub enum BigModel {
    /// ChatGLM backend, driven through a `C2RustGLM` client via SSE invocation.
    ChatGml,
    /// OpenAI ChatGPT backend, driven through `chat_invoke`.
    ChatGpt,
}

/// Sends `prompt` to the selected big-model backend and returns the reply text.
///
/// * `prompt` - the prompt to send. Takes `&str` rather than `&String` (clippy
///   `ptr_arg`); existing call sites passing `&String` still compile via deref
///   coercion.
/// * `glm` - mutable ChatGLM client; required when `model` is
///   [`BigModel::ChatGml`], ignored otherwise.
/// * `model` - which backend to dispatch to.
/// * `name` - caller label, used only in the start-of-call log line.
///
/// Failures are not surfaced as a `Result`: the ChatGLM path returns the
/// literal `"Error Model"` when no client was supplied, and the ChatGPT path
/// logs the error and returns the literal `"Error"`.
pub async fn chat(prompt: &str, glm: Option<&mut C2RustGLM>, model: BigModel, name: String) -> String {
    println!("start call chat function : {}; model: {:?}", name, model);
    match model {
        BigModel::ChatGml => {
            // The GLM backend needs a client instance; without one we can
            // only report the misuse to the caller.
            if let Some(glm) = glm {
                // NOTE(review): assumes sse_invoke_calling accepts &str (or a
                // type &str coerces to) — confirm against its signature.
                glm.sse_invoke_calling(API_KEY, prompt).await;
                glm.get_ai_response()
            } else {
                "Error Model".to_string()
            }
        }
        BigModel::ChatGpt => match chat_invoke(prompt).await {
            Ok(res) => res,
            Err(e) => {
                // NOTE(review): error is printed to stdout, not stderr —
                // kept as-is to preserve observable behavior.
                println!("Error: {:?}", e);
                "Error".to_string()
            }
        },
    }
}

/// Thin async wrapper that forwards `trans_codes` to [`c2rust::c2rust`].
///
/// Returns the updated [`TranscodeParams`] on success, or the error message
/// `String` produced by the underlying transcoder.
pub async fn chat_c2rust(trans_codes: &TranscodeParams) -> Result<TranscodeParams, String> {
    c2rust::c2rust(trans_codes).await
}
