#![allow(unused_variables)]

use std::{borrow::Borrow, collections::BTreeMap, iter::repeat, sync::Arc};

use axum::{
    Json, Router,
    extract::{Path, State},
    http::StatusCode,
    routing::{get, post},
};
use axum_macros::debug_handler;
use clap::{Arg, ArgAction, Command};
use rand_chacha::rand_core::SeedableRng;
use rand_chacha::ChaCha8Rng;
use rimec::{
    imec::{
        common::{
            GRPCMarkovChain, NextTokenDistributionProvider,
            markov_backend::{TokenizeRequest, continuation_client::ContinuationClient},
        },
        decode::decode,
        encode::encode,
    },
    markov::{MarkovChainInternal, STARTS_WITH_ALNUM, TokenIterator},
};
use tokio::sync::RwLock;
use tonic::transport::Channel;

extern crate slog;
extern crate slog_async;
extern crate slog_stream;
extern crate slog_term;

use std::time::Instant;

use serde::{Deserialize, Serialize};
use slog::Drain;
use slog::{Logger, debug, info, o};
use std::boxed::Box;

/// Global server state shared across all request handlers.
#[derive(Clone)]
struct AppState {
    /// Serialized as the response body of `GET /`.
    status: Status,
    /// Active communication channels, keyed by stringified channel id.
    channels: BTreeMap<String, ChannelData>,
    logger: Logger,
    /// When true, handlers hand the logger down into encode/decode.
    debug_logging: bool,
    /// Template provider cloned into each newly set-up channel.
    provider: Provider, // default_ ...
}

/// Handlers share `AppState` behind an async read/write lock.
type SharedState = Arc<RwLock<AppState>>;

/// Per-channel state: its provider, RNG stream, parameters, and message queues.
#[derive(Clone)]
struct ChannelData {
    provider: Provider,
    /// Deterministic RNG dedicated to this channel's encoding.
    rng: ChaCha8Rng,
    // Parameters
    parameters: Parameters,
    // pending_send_messages queue
    outbox: Vec<String>,
    inbox: Vec<String>, // => vec<Packet>
}

/// Communication parameters for a single channel.
#[derive(Clone)]
struct Parameters {
    /// Input alphabet labels (all 256 byte values at setup time).
    input_space_labels: Vec<u8>,
    /// Initial covertext token context handed to encode/decode.
    cnext_start: Vec<String>,
}

/// Health/status payload returned by `GET /`; currently carries no fields.
#[derive(Serialize, Copy, Clone)]
struct Status {
    //
}

/// Source of next-token distributions used for encoding/decoding.
#[derive(Clone)]
enum Provider {
    /// In-process n-gram Markov chain (built from a local text file).
    Internal {
        provider: MarkovChainInternal,
        // tokenizer: Box<dyn Iterator<Item = String> + Sync + Send>,
    },
    /// Remote continuation model reached over gRPC.
    External {
        provider: GRPCMarkovChain,
        backend_client: ContinuationClient<Channel>,
    },
}

use base64::prelude::*;

/// Request body for `POST /encode/:channel`.
#[derive(Serialize, Deserialize, Clone)]
struct MessageToEncode {
    message: String, // base64-encoded payload bytes
                     // channel_id: String,
}

/// Response body of `POST /encode/:channel`.
#[derive(Serialize, Deserialize, Clone)]
struct EncodedMessage {
    text: String, // stegotext produced by the encoder
    perf: u128,   // encode duration in microseconds
}

/// Request body for `POST /decode/:channel`.
#[derive(Serialize, Deserialize, Clone)]
struct MessageToDecode {
    text: String,     // stegotext to decode
    num_parts: usize, // expected message length (the self-tests pass `input.len()` here)
}

/// Response body of `POST /decode/:channel`.
#[derive(Serialize, Deserialize, Clone)]
struct DecodedMessage {
    message: String, // base64-encoded decoded bytes
    perf: u128,      // decode duration in microseconds
}

/// Request body for `POST /reset`; currently carries no fields.
#[derive(Serialize, Deserialize, Clone)]
struct ResetCommand {
    //
}

/// Request body for `POST /setup`.
#[derive(Serialize, Deserialize, Clone)]
struct SetupCommand {
    start_string: String, // initial covertext context for the new channel
}

/// Response body of `POST /setup`: id of the newly created channel.
#[derive(Serialize, Deserialize, Clone)]
struct SetupResult {
    channel_id: String,
}

/// `GET /` — report the current server status as JSON.
#[debug_handler]
async fn handler_home(State(state): State<SharedState>) -> Result<Json<Status>, StatusCode> {
    let app = state.read().await;
    Ok(Json(app.status))
}

/// `POST /setup` — create a new channel from a covertext start string and
/// return its id.
///
/// Returns 422 if the start string cannot be tokenized.
#[debug_handler]
async fn handler_setup(
    State(state): State<SharedState>,
    Json(payload): Json<SetupCommand>,
) -> Result<Json<SetupResult>, StatusCode> {
    // Fresh per-channel RNG; the seed is logged so runs can be reproduced.
    let rng = ChaCha8Rng::from_os_rng();
    let seed = rng.get_seed();

    let tokenized = TokenIterator::from_string(&payload.start_string)
        .map_err(|_| StatusCode::UNPROCESSABLE_ENTITY)?;
    let cnext_start = tokenized.collect::<Vec<_>>();

    let parameters = Parameters {
        // Full byte alphabet: every u8 value is a valid input symbol.
        input_space_labels: (0u8..=255).collect::<Vec<_>>(),
        cnext_start,
    };

    // Hold a single write lock across id allocation and insertion so two
    // concurrent setups cannot race to the same channel id (the previous
    // read-len-then-write-insert sequence could collide).
    let mut app = state.write().await;
    info!(app.logger, "rng seed = {:?}", seed);
    let new_id = app.channels.len() + 1;
    let provider = app.provider.clone();
    app.channels.insert(
        new_id.to_string(),
        ChannelData {
            provider,
            rng,
            parameters,
            outbox: vec![],
            inbox: vec![],
        },
    );
    Ok(Json(SetupResult {
        channel_id: new_id.to_string(),
    }))
}

/// `POST /reset` — intended to clear channel state; not yet implemented
/// (currently panics via `todo!`).
#[debug_handler]
async fn handler_reset(
    State(state): State<SharedState>,
    Json(payload): Json<ResetCommand>,
) -> Result<Json<()>, StatusCode> {
    todo!();
}

/// `POST /decode/:channel` — decode the posted stegotext back into message
/// bytes (returned base64-encoded) using the channel's provider.
///
/// Returns 404 for an unknown channel, 422 if the text cannot be tokenized,
/// 500 if decoding fails.
#[debug_handler]
async fn handler_decode(
    State(state): State<SharedState>,
    Path(channel): Path<String>,
    Json(payload): Json<MessageToDecode>,
) -> Result<Json<DecodedMessage>, StatusCode> {
    // Snapshot everything under ONE read lock (the original took a write
    // lock just to clone, plus three extra read locks); decoding is slow
    // and must not run while holding the state lock.
    let (logger, debug_logging, communication_channel) = {
        let app = state.read().await;
        (
            app.logger.clone(),
            app.debug_logging,
            app.channels
                .get(&channel)
                .cloned()
                .ok_or(StatusCode::NOT_FOUND)?,
        )
    };

    let num_message_parts = payload.num_parts; // usize is Copy; no clone needed
    let input = payload.text;

    match &communication_channel.provider {
        Provider::Internal { provider } => {
            let start = Instant::now();

            let mut tokenized =
                TokenIterator::from_string(&input).map_err(|_| StatusCode::UNPROCESSABLE_ENTITY)?;

            let decoded_string = decode(
                &mut tokenized,
                &communication_channel.parameters.input_space_labels[..],
                // FIXME cloning discards any state the provider accumulates
                &mut provider.clone(),
                num_message_parts,
                communication_channel.parameters.cnext_start.clone(),
                if debug_logging { Some(&logger) } else { None },
                None,
            )
            .await
            .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

            let decode_duration = start.elapsed();

            Ok(Json(DecodedMessage {
                message: BASE64_STANDARD.encode(decoded_string),
                perf: decode_duration.as_micros(),
            }))
        }

        Provider::External {
            provider,
            backend_client,
        } => todo!(),
    }
}

/// `POST /encode/:channel` — encode base64 message bytes into stegotext
/// using the channel's provider and RNG.
///
/// Returns 422 for malformed base64, 404 for an unknown channel, 500 if
/// encoding fails.
#[debug_handler]
async fn handler_encode(
    State(state): State<SharedState>,
    Path(channel): Path<String>,
    Json(payload): Json<MessageToEncode>,
) -> Result<Json<EncodedMessage>, StatusCode> {
    // Message body arrives base64-encoded; reject malformed input early.
    let input = BASE64_STANDARD
        .decode(payload.message)
        .map_err(|_| StatusCode::UNPROCESSABLE_ENTITY)?;

    // Snapshot under one read lock (was a write lock just to clone);
    // encoding is slow and must not run while holding the state lock.
    let (logger, debug_logging, mut communication_channel) = {
        let app = state.read().await;
        (
            app.logger.clone(),
            app.debug_logging,
            app.channels
                .get(&channel)
                .cloned()
                .ok_or(StatusCode::NOT_FOUND)?,
        )
    };

    match &communication_channel.provider {
        Provider::Internal { provider } => {
            let start = Instant::now();

            let encoded_tokens = encode(
                &mut communication_channel.rng,
                &input,
                &communication_channel.parameters.input_space_labels[..],
                // FIXME cloning discards provider state accumulated during encode
                &mut provider.clone(),
                // TODO cnext_start should track the conversation so far,
                // not the value fixed at setup time (tricky)
                communication_channel.parameters.cnext_start.clone(),
                if debug_logging { Some(&logger) } else { None },
                None,
            )
            .await
            .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

            let encoded_string = untokenize(encoded_tokens);

            let encode_duration = start.elapsed();

            // Persist the advanced RNG state. The previous clone-and-forget
            // meant every encode on a channel replayed the same RNG stream
            // (the FIXME above still applies to provider state).
            state
                .write()
                .await
                .channels
                .insert(channel, communication_channel.clone());

            Ok(Json(EncodedMessage {
                text: encoded_string,
                perf: encode_duration.as_micros(),
            }))
        }
        Provider::External {
            provider,
            backend_client,
        } => todo!(),
    }
}

// curl -v -H 'Content-Type: application/json' 127.0.0.1:4710/setup -d '{"start_string": "Kuchen"}'
// curl -v -H 'Content-Type: application/json' 127.0.0.1:4710/encode/1 -d '{"message": "YXNkCg=="}'

/// Entry point: parse CLI flags, then either run the REST server (`-s`) or
/// a one-shot encode/decode self-test (`-T`) against the selected cover
/// provider (`-C gpt2|gpt|markov_ngram`, defaulting to the external one).
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("RiMEC");

    let matches = Command::new("rimec")
        .version("0.1.0")
        .about("Aims to implement iMEC [arXiv:2210.14889 cs.CR, 2023]")
        .arg(
            Arg::new("cover_port")
                .short('c')
                .help("Covertext service port"),
        )
        .arg(
            Arg::new("cover_type")
                .short('C')
                .long("cover_type")
                .action(ArgAction::Set)
                .help("Covertext provider"),
        )
        .arg(
            Arg::new("server")
                .short('s')
                .long("serve")
                .action(ArgAction::SetTrue)
                .help("Run REST server"),
        )
        .arg(
            Arg::new("debug")
                .short('D')
                .action(ArgAction::SetTrue)
                .help("Enable logging of hardcore debug info"),
        )
        .arg(
            Arg::new("test")
                .short('T')
                .action(ArgAction::SetTrue)
                .help("Just run test"),
        )
        .get_matches();

    let debug_logging = matches.get_flag("debug");

    // Async terminal logger; Block on overflow so debug output is not dropped.
    let decorator = slog_term::TermDecorator::new().build();
    let drain = slog_term::FullFormat::new(decorator).build().fuse();
    let drain = slog_async::Async::new(drain)
        .overflow_strategy(slog_async::OverflowStrategy::Block)
        .build()
        .fuse();

    let logger = slog::Logger::root(drain, o!());

    // Log the seed so a failing run can be reproduced.
    let mut rng = ChaCha8Rng::from_os_rng();
    let seed = rng.get_seed();
    info!(logger, "rng seed = {:?}", seed);

    // n-gram order for the internal Markov provider.
    let up_to = 4;

    if matches.get_flag("server") {
        let provider = match matches.get_one::<String>("cover_type").map(|s| s.as_str()) {
            Some("gpt2") | Some("gpt") | None => create_external_provider().await?,
            Some("markov_ngram") => create_internal_provider(up_to).await?,
            Some(_) => Err(clap::Error::new(clap::error::ErrorKind::InvalidValue))?,
        };

        let app_state = AppState {
            status: Status {},
            provider,
            channels: BTreeMap::new(),
            logger: logger.clone(),
            debug_logging,
        };

        let shared_state = Arc::new(RwLock::new(app_state));

        // TODO conceptual problem: it must be possible to save encoder/decoder state.
        // and do trial decodings ... hm.
        let app = Router::new()
            .route("/", get(handler_home))
            // TODO serialize, deserialize too!
            // TODO get messages - remove because processed ....
            // TODO checkpoint ~ must also be supported ... or recreatable.
            .route("/setup", post(handler_setup))
            .route("/reset", post(handler_reset))
            .route("/encode/:channel", post(handler_encode))
            .route("/decode/:channel", post(handler_decode))
            .with_state(Arc::clone(&shared_state));

        let listener = tokio::net::TcpListener::bind("127.0.0.1:4710")
            .await
            .unwrap();

        debug!(logger, "listening on {}", listener.local_addr().unwrap());

        axum::serve(listener, app).await?;
    } else {
        // One-shot test mode. Keep the accepted cover_type values in sync
        // with the server branch above ("gpt2" was previously rejected here
        // although the server accepted it).
        match matches.get_one::<String>("cover_type").map(|s| s.as_str()) {
            Some("gpt2") | Some("gpt") | None => {
                if let Provider::External {
                    provider,
                    backend_client,
                } = create_external_provider().await?
                {
                    if matches.get_flag("test") {
                        run_test_gpt2(&mut rng, provider, debug_logging, logger, backend_client)
                            .await?;
                    }
                } else {
                    unreachable!();
                }
            }
            Some("markov_ngram") => {
                if matches.get_flag("test") {
                    run_test_internal(&mut rng, debug_logging, logger).await?;
                }
            }
            Some(_) => Err(clap::Error::new(clap::error::ErrorKind::InvalidValue))?,
        };
    }

    Ok(())
}

/// Build an in-process n-gram Markov provider of order `up_to`, trained
/// from the local file `sample.txt`.
async fn create_internal_provider(up_to: usize) -> Result<Provider, anyhow::Error> {
    let chain = MarkovChainInternal::from_file("sample.txt", up_to)?;
    Ok(Provider::Internal { provider: chain })
}

/// Connect to the gRPC continuation backend on `[::1]:50051` and wrap the
/// client as an external provider.
async fn create_external_provider() -> Result<Provider, anyhow::Error> {
    let channel = tonic::transport::Channel::from_static("http://[::1]:50051")
        .connect()
        .await?;

    let client = ContinuationClient::new(channel);

    Ok(Provider::External {
        provider: GRPCMarkovChain {
            backend_client: client.clone(),
        },
        backend_client: client,
    })
}

// TODO combine into one parametric function

/// Encode/decode round-trip self-test using the internal n-gram Markov
/// provider (trained from `sample.txt`): encodes "Hello Stego", decodes the
/// resulting stegotext, and prints whether the bytes survived the trip.
async fn run_test_internal(
    rng: &mut ChaCha8Rng,
    // mut provider: impl NextTokenDistributionProvider<String, Vec<String>>,
    debug_logging: bool,
    logger: Logger,
) -> Result<(), anyhow::Error> {
    // n-gram order; must match the padding length applied below.
    let up_to = 4;
    let mut provider = MarkovChainInternal::from_file("sample.txt", up_to)?;

    // println!("{:?}", provider);

    // Input alphabet: all 256 byte values.
    let input_space_labels = (0u8..=255).collect::<Vec<_>>();
    let input: Vec<u8> = "Hello Stego".as_bytes().to_owned();
    //     0, 1, 10, 0, 29, 99, 254, 13, 255, 255, 0, 25, 100, 125, 150, 175, 33,
    // ];

    let start_string = "Die".to_string();
    // "This is a stupid, boring way to start a conversation. ".to_string();
    let tokenized = TokenIterator::from_string(&start_string)?;
    // Left-pad with `up_to` placeholder tokens so the chain starts with a
    // full-length context window.
    let padded = repeat("X".to_owned()).take(up_to).chain(tokenized);
    let cnext_start = padded.collect::<Vec<_>>();

    let start = Instant::now();

    let encoded_string = encode(
        rng,
        &input,
        &input_space_labels[..],
        &mut provider,
        cnext_start.clone(),
        if debug_logging { Some(&logger) } else { None },
        None,
    )
    .await?;

    let encode_duration = start.elapsed();

    debug!(logger, "encoded in {}ms", encode_duration.as_millis());
    debug!(logger, "{:?}", encoded_string);

    // Re-join tokens into text, then re-tokenize — mirrors what a real
    // receiver would do with the stegotext.
    let encoded_string = untokenize(encoded_string);

    // let tokenized = TokenIterator::from_string(&encoded_string)?;
    // let tokenized = tokenized.collect::<Vec<_>>();
    // debug!(logger, "{:?}", tokenized);
    // info!(logger, "{:?}", untokenize(tokenized));

    let mut tokenized = TokenIterator::from_string(&encoded_string)?;

    let start = Instant::now();

    let decoded_string = decode(
        //        &mut encoded_string.into_iter(),
        // &mut encoded_string.into_iter(),
        &mut tokenized,
        &input_space_labels[..],
        &mut provider,
        input.len(),
        cnext_start,
        if debug_logging { Some(&logger) } else { None },
        None,
    )
    .await?;

    let decode_duration = start.elapsed();

    debug!(logger, "decoded in {}ms", decode_duration.as_millis());
    println!("decoded: {:?}", decoded_string);
    println!("input  : {:?}", input);
    println!("correct? {}", decoded_string == input);

    Ok(())
}

/// Re-join encoder output tokens into a single string.
///
/// A space is inserted before a token that matches `STARTS_WITH_ALNUM`
/// (unless it is the first token or follows an opening parenthesis) and
/// before any token starting with '('; all other tokens attach directly.
///
/// Rewritten from a `fold` with `String +` (an accidental O(n²) that also
/// cloned the final result via a redundant `.to_owned()`) to in-place
/// `push_str`, preserving the exact output byte-for-byte.
fn untokenize(encoded_tokens: Vec<String>) -> String {
    let mut out = String::new();
    for token in &encoded_tokens {
        if STARTS_WITH_ALNUM.is_match(token) && !(out.is_empty() || out.ends_with('('))
            || token.starts_with('(')
        {
            out.push(' ');
        }
        out.push_str(token);
    }
    out
}

// TODO unify !!

/// Encode/decode round-trip self-test against the external (gRPC) provider:
/// encodes a fixed byte pattern, re-tokenizes the joined stegotext via the
/// backend's tokenizer, then decodes and prints both byte sequences.
async fn run_test_gpt2(
    rng: &mut ChaCha8Rng,
    mut provider: impl NextTokenDistributionProvider<String, String>,
    debug_logging: bool,
    logger: Logger,
    mut markov_backend_client: ContinuationClient<tonic::transport::Channel>,
) -> Result<(), anyhow::Error> {
    info!(logger, "Running encode - decode test.");

    // Input alphabet: all 256 byte values.
    let input_space_labels = (0u8..=255).collect::<Vec<_>>();
    // Fixed test payload covering boundary bytes (0, 255) and repeats.
    let input: Vec<u8> = vec![
        0, 1, 10, 0, 29, 99, 254, 13, 255, 255, 0, 25, 100, 125, 150, 175, 33,
    ];

    let start_string = "This is a stupid, boring way to start a conversation. ".to_string();

    let start = Instant::now();

    let encoded_string = encode(
        rng,
        &input,
        &input_space_labels[..],
        &mut provider,
        start_string.clone(),
        if debug_logging { Some(&logger) } else { None },
        None,
    )
    .await?;

    let encode_duration = start.elapsed();

    // Join the encoder's tokens and let the backend re-tokenize — mirrors
    // what a real receiver would do with the raw stegotext.
    let joined_encoded_string = encoded_string.join("");

    let tokenized_encoded_string = markov_backend_client
        .tokenize(TokenizeRequest {
            wholestring: joined_encoded_string,
        })
        .await?;

    let tokenized = tokenized_encoded_string.into_inner().tokens;

    debug!(logger, "encoded in {}s", encode_duration.as_secs());
    debug!(logger, "{:?}", encoded_string);
    debug!(logger, "{:?}", encoded_string.join(""));
    debug!(logger, "{:?}", tokenized);
    info!(logger, "{:?}", tokenized.join(""));

    let start = Instant::now();

    let decoded_string = decode(
        //        &mut encoded_string.into_iter(),
        &mut tokenized.into_iter(),
        &input_space_labels[..],
        &mut provider,
        input.len(),
        start_string,
        if debug_logging { Some(&logger) } else { None },
        None,
    )
    .await?;

    let decode_duration = start.elapsed();

    debug!(logger, "decoded in {}s", decode_duration.as_secs());
    println!("decoded: {:?}", decoded_string);
    println!("input  : {:?}", input);

    Ok(())
}
