use log::{info, warn};

use avro_rs::to_avro_datum ;

use rdkafka::message::{Headers, Message};
use rdkafka::consumer::{CommitMode, Consumer, ConsumerContext, Rebalance};
use rdkafka::consumer::stream_consumer::StreamConsumer;
use rdkafka::config::ClientConfig;

mod kafka {
    //! Custom rdkafka client/consumer context used by the consumer in `main`.

    use rdkafka::client::ClientContext;
    use rdkafka::consumer::ConsumerContext;

    /// Context handed to the `StreamConsumer`; relies entirely on the
    /// default trait implementations (no custom rebalance/commit hooks yet).
    pub struct CustomContext;

    impl ClientContext for CustomContext {}
    impl ConsumerContext for CustomContext {}
}

mod api {
    /// Fetch the Avro schema definition from the given API server node list.
    ///
    /// NOTE(review): currently a stub that always returns an empty string;
    /// `_api_server_nodes` is intentionally unused until the real lookup is
    /// implemented — the underscore prefix silences the unused-variable
    /// warning, and the snake_case name fixes the non_snake_case warning.
    pub fn get_schema(_api_server_nodes: &str) -> String {
        String::new()
    }
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    env_logger::init();
    dotenv::from_filename("meta-lib-static/env/.env").ok();

    let kafkas = "10.25.21.41:9092,10.25.21.42:9092,10.25.21.43:9092".to_string();
    let kafka_srs = "http://10.25.21.41:8081,http://10.25.21.42:8081,http://10.25.21.43:8081".to_string() ;
    let in_topic = "data_pipeline_dev_dc_sdk_push".to_string() ;

    let consumer: StreamConsumer<kafka::CustomContext> = ClientConfig::new()
        .set("bootstrap.servers", &kafkas)
        .set("group.id", "enum-server-rust:dashboard_menu") 
        .set("auto.offset.reset", "smallest")
        .set("enable.auto.commit", "true")
        .create_with_context(kafka::CustomContext)
        .expect("Consumer creation failed");

    consumer.subscribe(&[in_topic.as_str()][..]).expect("Can't subscribe to specified topics") ;


    loop {
        match consumer.recv().await {
            Err(e) => warn!("Kafka error: {}", e),
            Ok(m) => {
                let payload = m.payload().unwrap() ;
                // let avro = decoder.decode(m.payload()).await.unwrap() ;
                let avro = to_avro_datum(unimplemented!(), payload) ;
                

            }
                
        }
    }


    Ok(())
}
