use log::info;
use rdkafka::config::RDKafkaLogLevel;
use rdkafka::consumer::{ConsumerContext, Rebalance, StreamConsumer};
use rdkafka::error::KafkaResult;
use rdkafka::{ClientConfig, ClientContext, TopicPartitionList};

// Convenience alias: a `StreamConsumer` driven by our logging context below.
pub(crate) type LoggingConsumer = StreamConsumer<CustomContext>;

// Zero-sized context type; exists only to hook the consumer callbacks
// (see the `ConsumerContext` impl) for logging purposes.
pub(crate) struct CustomContext;

// No client-level callbacks are customized; the default implementations suffice.
impl ClientContext for CustomContext {}

/// Consumer-side callbacks that trace rebalance and offset-commit activity.
///
/// Every hook just emits an `info`-level log line; no custom assignment or
/// commit handling is performed beyond librdkafka's defaults.
impl ConsumerContext for CustomContext {
    // Invoked after each (auto-)commit attempt; `result` carries success/failure.
    fn commit_callback(&self, result: KafkaResult<()>, _offsets: &TopicPartitionList) {
        info!("Committing offsets: {:?}", result);
    }

    // Invoked before partition assignment/revocation is applied.
    fn pre_rebalance(&self, rebalance: &Rebalance) {
        info!("Pre rebalance {:?}", rebalance);
    }

    // Invoked once the new assignment is in effect.
    fn post_rebalance(&self, rebalance: &Rebalance) {
        info!("Post rebalance {:?}", rebalance);
    }
}

/// Builds a [`LoggingConsumer`] that authenticates over SASL_SSL with
/// SCRAM-SHA-512 and logs rebalance/commit events via [`CustomContext`].
///
/// * `brokers` — comma-separated `host:port` list for `bootstrap.servers`.
/// * `group_id` — Kafka consumer group id; also used as the SASL username
///   when `username` is `None`.
/// * `username` — optional SASL username override.
/// * `password` — SASL password.
///
/// # Panics
/// Panics if librdkafka rejects the configuration or the consumer cannot
/// be created.
pub(crate) fn make_consumer(
    brokers: &str,
    group_id: &str,
    username: Option<&str>,
    password: &str,
) -> LoggingConsumer {
    // Fall back to the group id as the SASL user when no explicit
    // username is supplied.
    let username = username.unwrap_or(group_id);
    ClientConfig::new()
        .set("group.id", group_id)
        .set("security.protocol", "SASL_SSL")
        .set("sasl.mechanism", "SCRAM-SHA-512")
        .set("sasl.username", username)
        .set("sasl.password", password)
        // NOTE(review): CA path is hardcoded for a specific deployment;
        // consider making it configurable if this code is reused elsewhere.
        .set(
            "ssl.ca.location",
            "/usr/local/share/ca-certificates/Yandex/YandexInternalRootCA.crt",
        )
        .set("bootstrap.servers", brokers)
        .set("enable.partition.eof", "false")
        .set("session.timeout.ms", "6000")
        .set("enable.auto.commit", "true")
        .set("auto.offset.reset", "earliest")
        // NOTE(review): Debug is very chatty; presumably intentional while
        // this integration is being stabilized — confirm before production.
        .set_log_level(RDKafkaLogLevel::Debug)
        // Type parameters are inferred from the declared return type, so the
        // turbofish the original carried is unnecessary.
        .create_with_context(CustomContext)
        .expect("Kafka consumer creation failed")
}
