#![allow(unused_imports)]

use dotenv::dotenv;
use std::env::var ;
use std::net::SocketAddr;
use std::collections::{HashSet,HashMap} ;
use std::sync::{Arc, Mutex};

use futures::future::{ready};
use futures::future ;
use futures::stream::{StreamExt};

use rdkafka::client::ClientContext;
use rdkafka::consumer::{CommitMode, Consumer, ConsumerContext, Rebalance};
use rdkafka::error::KafkaResult;
use rdkafka::topic_partition_list::TopicPartitionList;
use rdkafka::consumer::stream_consumer::StreamConsumer;

use rdkafka::message::{Message} ;
use rdkafka::config::ClientConfig;
use rdkafka::producer::{FutureProducer, FutureRecord};
use schema_registry_converter::async_impl::schema_registry::SrSettings;
use schema_registry_converter::async_impl::avro::AvroDecoder ;
use schema_registry_converter::types::{Value as AvroValue};

use sqlx::ConnectOptions ;
use sqlx::postgres::{
    PgConnectOptions, PgConnection, PgDatabaseError, PgErrorPosition, PgSeverity,
};
use sqlx::postgres::{PgPoolOptions, PgRow, Postgres};

use serde_json ;
use std::str::FromStr;
use utils::* ;

mod kafka {
    use super::* ;

    /// Consumer context whose rebalance/commit hooks are deliberate no-ops;
    /// it exists only to satisfy the `StreamConsumer` type parameter.
    pub struct CustomContext;

    impl ClientContext for CustomContext {}

    impl ConsumerContext for CustomContext {
        fn pre_rebalance(&self, _rebalance: &Rebalance) {}

        fn post_rebalance(&self, _rebalance: &Rebalance) {}

        fn commit_callback(&self, _result: KafkaResult<()>, _offsets: &TopicPartitionList) {}
    }

    /// Builds schema-registry settings from a comma-separated host list.
    /// The first entry seeds the builder; the rest are added as fallback URLs.
    ///
    /// Panics if the builder rejects the configuration (the first `expect`
    /// cannot fire: `split` always yields at least one item).
    pub fn mk_sr(hosts: &str) -> SrSettings {
        let mut urls = hosts.split(',');
        let primary = urls.next().expect("at least one ACCESS_KAFKA_SR_HOSTS needed!");
        let mut builder = SrSettings::new_builder(primary.into());
        for url in urls {
            builder.add_url(url.into());
        }
        builder.build().expect("kafka-sr build error!")
    }
}

mod utils {
    use super::* ;

    /// Returns the first address in the comma-separated `hosts` list that
    /// parses as a `SocketAddr` (after stripping a leading `http://`) and
    /// whose IP is bound to a local network interface, or `None` if no entry
    /// matches. Used to gate which host in a fleet actually runs the worker.
    ///
    /// # Panics
    /// Panics if local network interfaces cannot be enumerated.
    pub fn resolve_socket_addr(hosts: &str) -> Option<SocketAddr> {
        // IPs bound to this machine's interfaces.
        let local_ips = get_if_addrs::get_if_addrs()
            .expect("failed to enumerate local network interfaces")
            .iter()
            .map(|interface| interface.ip())
            .collect::<HashSet<_>>();
        hosts
            .split(',')
            .map(|host| host.replace("http://", ""))
            .filter_map(|host| host.parse::<SocketAddr>().ok())
            // `find` replaces the previous `.filter(..).next()` (clippy: filter_next).
            .find(|addr| local_ips.contains(&addr.ip()))
    }

}

mod avro {
    use super::* ;

    /// Flattens an Avro `Record` into a field-name → value map.
    /// Any non-record value yields an empty map.
    pub fn as_map(v: &AvroValue) -> HashMap<String, AvroValue> {
        match v {
            // Clone per-entry instead of cloning the whole Vec first.
            AvroValue::Record(_, items) => items.iter().cloned().collect(),
            _ => HashMap::new(),
        }
    }

    /// Extracts an `i32`, unwrapping a nullable `Union` wrapper if present.
    /// Union handling was added for consistency with `as_string`; without it
    /// an Avro-nullable int field always decoded to `None`.
    pub fn as_int(v: &AvroValue) -> Option<i32> {
        match v {
            AvroValue::Int(i) => Some(*i),
            AvroValue::Union(inner) => match inner.as_ref() {
                AvroValue::Int(i) => Some(*i),
                _ => None,
            },
            _ => None,
        }
    }

    /// Extracts an owned `String`, unwrapping a nullable `Union` wrapper
    /// if present; returns `None` for any other value (including a union
    /// holding a non-string).
    pub fn as_string(v: &AvroValue) -> Option<String> {
        match v {
            AvroValue::String(s) => Some(s.clone()),
            AvroValue::Union(inner) => match inner.as_ref() {
                AvroValue::String(s) => Some(s.clone()),
                _ => None,
            },
            _ => None,
        }
    }
}

/// Consumes Avro-encoded `{prefix}_event_enum_dim` records from Kafka,
/// decodes them via the schema registry, and inserts one row per message
/// into `{schema}.dc_reported_track_event` in Postgres.
///
/// All configuration comes from `EXCHANGE_DC_EVENT_ENUM_*` environment
/// variables (optionally loaded from `.env`). Returns `Ok(())` immediately
/// when no address in `EXCHANGE_DC_EVENT_ENUM_HOSTS` matches a local
/// interface IP — i.e. this instance is not the designated worker.
///
/// # Errors
/// Returns an error only if the initial Postgres connection fails; most
/// runtime failures (missing env vars, Kafka setup, decode errors, missing
/// Avro fields, failed inserts) panic via `expect`/`unwrap` instead.
pub async fn run() -> Result<(), Box<dyn std::error::Error>> {
    // Load `.env` if present; a missing file is not an error.
    dotenv().ok();

    // Host gating: only the machine whose interface IP appears in the list runs.
    let env_hosts = var("EXCHANGE_DC_EVENT_ENUM_HOSTS").unwrap_or("127.0.0.1:0".into()) ;
    if resolve_socket_addr(&env_hosts).is_none() {
        println!("EXCHANGE_DC_EVENT_ENUM_HOSTS not match! skip") ;
        return Ok(()) ;
    }

    // Kafka / schema-registry / Postgres settings, with local-dev defaults.
    // Only the Postgres credentials are mandatory (no safe default exists).
    let env_kafkas = var("EXCHANGE_DC_EVENT_ENUM_IN_KAFKAS").unwrap_or("127.0.0.1:9092".into()) ;
    let env_kafka_prefix = var("EXCHANGE_DC_EVENT_ENUM_IN_KAFKA_PREFIX").unwrap_or("data_pipeline_dev".into()) ;
    let env_kafka_srs = var("EXCHANGE_DC_EVENT_ENUM_IN_KAFKA_SRS").unwrap_or("127.0.0.1:8081".into()) ;
    let env_pgs = var("EXCHANGE_DC_EVENT_ENUM_OUT_PGS").unwrap_or("127.0.0.1:5432".into()) ;
    let env_pg_username = var("EXCHANGE_DC_EVENT_ENUM_OUT_PG_USERNAME").expect("NO EXCHANGE_DC_EVENT_ENUM_OUT_PG_USERNAME FOUND!") ;
    let env_pg_password = var("EXCHANGE_DC_EVENT_ENUM_OUT_PG_PASSWORD").expect("NO EXCHANGE_DC_EVENT_ENUM_OUT_PG_PASSWORD FOUND!") ;

    // `PREFIX` is the database name in the URI; `SCHEMA` is interpolated into the INSERT below.
    let env_pg_prefix = var("EXCHANGE_DC_EVENT_ENUM_OUT_PG_PREFIX").unwrap_or("postgres".to_string()) ;
    let env_pg_schema = var("EXCHANGE_DC_EVENT_ENUM_OUT_PG_SCHEMA").unwrap_or("dev".to_string()) ;

    println!("connect pg: {}", &env_pgs) ; 

    // Single long-lived connection (not a pool), shared below via Arc<Mutex<_>>.
    let pg_uri = format!("postgres://{}:{}@{}/{}", &env_pg_username, &env_pg_password, &env_pgs, &env_pg_prefix) ;
    let out_pg_conn = PgConnectOptions::from_str(&pg_uri).unwrap().connect().await? ;

    let out_pg_conn_arc = &Arc::new(Mutex::new(out_pg_conn)) ;

    // Avro decoder backed by the schema registry (caches schemas internally).
    let decoder = AvroDecoder::new(kafka::mk_sr(&env_kafka_srs));
    let decoder_arc = &Arc::new(Mutex::new(decoder));

    // Group id is derived from the topic so each topic gets its own consumer group.
    // "smallest" = start from the earliest offset when no committed offset exists.
    let in_topic = format!("{}_event_enum_dim", env_kafka_prefix) ;
    let consumer: StreamConsumer<kafka::CustomContext> = ClientConfig::new()
        .set("group.id", &format!("{}:exchange-dc-event-enum", in_topic))
        .set("bootstrap.servers", &env_kafkas)
        .set("auto.offset.reset", "smallest")
        .set("enable.auto.commit", "true")
        .create_with_context(kafka::CustomContext)
        .expect("Consumer creation failed") ;
    consumer.subscribe(&[in_topic.as_str()][..]).expect("Can't subscribe to specified topics") ;

    // NOTE(review): the closures below hold `std::sync::Mutex` guards across
    // `.await` points (decoder lock over `decode().await`, connection lock over
    // `execute().await`). That blocks instead of yielding and makes the futures
    // non-Send — presumably tolerable on a single-threaded runtime, but confirm;
    // `tokio::sync::Mutex` or restructuring would be safer.
    // NOTE(review): `consumer.start()` looks like the older rdkafka stream API —
    // verify against the pinned rdkafka version before upgrading.
    let env_pg_schema_ref = &env_pg_schema ;
    consumer.start()
        // Drop messages that arrived with a Kafka-level error.
        .filter_map( |x| future::ready( x.ok() ) )
        // Drop messages whose Avro payload fails to decode (errors are swallowed).
        .filter_map( |x| async move {
            let decode_one = decoder_arc.clone() ;
            let mut decode_mut = decode_one.lock().unwrap() ;
            decode_mut.decode(x.payload()).await.ok()
        }).for_each(|x| async move {
            // Flatten the decoded Avro record into a field-name → value map.
            let row = avro::as_map(&x.value) ;
            println!("row: {:?}", row) ;

            let pg_conn = out_pg_conn_arc.clone() ;
            let mut pg_conn_mut = pg_conn.lock().unwrap() ;

            // "extentions" (sic) matches the Avro field and DB column names —
            // do not correct the spelling here without a schema migration.
            // Nested record → flat string map, serialized to JSONB at bind $17.
            let mut jsonb = HashMap::new();
            for (key, val) in  avro::as_map(row.get("extentions").unwrap()) {
                jsonb.insert(key, avro::as_string(&val).unwrap()) ;
            }
            // Schema name is format!-interpolated (identifiers can't be bound
            // parameters); all values go through $n binds. NOTE(review): every
            // `row.get(..).unwrap()` panics the task if a field is absent.
            sqlx::query(&format!("
                 INSERT INTO {}.dc_reported_track_event( 
                   event_enum_key, 
                   project_id, app_id, page_type, sdk_version, event_type, 
                   event_code, event_code_md5, event_name, element_path, element_content, 
                   send_type, tenant_id, current_page_url, current_page_path, current_page_name, 
                   extentions, app_version
                 ) VALUES ( 
                    $1,
                    $2, $3, $4, $5, $6, 
                    $7, $8, $9, $10, $11, 
                    $12, $13, $14, $15, $16, 
                    $17, $18
                 )", env_pg_schema_ref)
            )
            .bind(avro::as_int(row.get("event_enum_key").unwrap()))
            .bind(avro::as_string(row.get("project_id").unwrap()))
            .bind(avro::as_string(row.get("app_id").unwrap()))
            .bind(avro::as_string(row.get("page_type").unwrap()))
            .bind(avro::as_string(row.get("sdk_version").unwrap()))
            .bind(avro::as_string(row.get("event_type").unwrap()))
            .bind(avro::as_string(row.get("event_code").unwrap()))
            .bind(avro::as_string(row.get("event_code_md5").unwrap()))
            .bind(avro::as_string(row.get("event_name").unwrap()))
            .bind(avro::as_string(row.get("element_path").unwrap()))
            .bind(avro::as_string(row.get("element_content").unwrap()))
            .bind(avro::as_string(row.get("send_type").unwrap()))
            .bind(avro::as_string(row.get("tenant_id").unwrap()))
            .bind(avro::as_string(row.get("current_page_url").unwrap()))
            .bind(avro::as_string(row.get("current_page_path").unwrap()))
            .bind(avro::as_string(row.get("current_page_name").unwrap()))
            .bind(serde_json::to_value(jsonb).unwrap())
            .bind(avro::as_string(row.get("app_version").unwrap()))
            .execute( &mut *pg_conn_mut )
            .await.unwrap() ;

        }).await ;

    Ok(())
    
}
