use std::{
    collections::BTreeMap,
    fmt::{Debug, Display},
};

#[cfg(test)]
extern crate quickcheck;

extern crate rand;
use rand::{distr::weighted::WeightedIndex, prelude::*};

use ordered_float::NotNan;

use rand_chacha::ChaCha8Rng;
use tokio::sync::mpsc::UnboundedSender;

use crate::{
    DiscreteDistribution, Entropic,
    common::{
        DataOfInterest, NextTokenDistributionProvider, compute_marginals, log_mu_table,
        make_uniform, pick_istar,
    },
    min_entropy_joint_distribution_sparse, order_descending,
};

/// Encode the input symbol sequence `xs` into a sequence of output tokens `W`.
///
/// Maintains one distribution `mu[i]` over the message alphabet per input
/// symbol (initially uniform) and repeatedly runs [`encode_step`] — which
/// couples the provider's next-token distribution with one `mu[i]` and emits
/// one token — until every `mu[i]` has entropy below 0.1, i.e. each input
/// symbol is pinned down (near-)deterministically.
///
/// * `rng` — RNG used for sampling; a fixed seed makes the run reproducible.
/// * `xs` — input symbols; a slice (not a stream) because random access is
///   required and the total length must be known up front.
/// * `message_space_labels` — the alphabet the `xs` are drawn from.
/// * `cnext` — next-token distribution provider; `cnext_start` is the initial
///   state handed to its `reset`.
/// * `logger` / `tx` — optional logging / diagnostics sinks.
///
/// # Errors
/// Propagates failures from the token provider, from sampling inside
/// [`encode_step`], and from sending on a closed diagnostics channel.
pub async fn encode<A: Clone + std::fmt::Display + Eq, W: Clone, V>(
    rng: &mut ChaCha8Rng,
    // not a stream, but a vec, because it needs random access!
    // and knows number of input parts
    xs: &[A],

    message_space_labels: &[A],

    cnext: &mut impl NextTokenDistributionProvider<W, V>,
    cnext_start: V,

    logger: Option<&slog::Logger>,
    tx: Option<&UnboundedSender<DataOfInterest>>,
) -> Result<Vec<W>, anyhow::Error>
where
    A: Debug,
    W: Debug,
{
    cnext.reset(cnext_start).await?;

    // Stop once every per-symbol distribution is (near-)deterministic.
    // 0.1 bits is a heuristic cut-off, not an exact-zero test.
    let should_stop = |mu: &[DiscreteDistribution<_>]| {
        mu.iter()
            .all(|distro: &DiscreteDistribution<_>| distro.d.entropy() < 0.1)
    };

    let n_input: usize = xs.len();
    let message_space_size = message_space_labels.len();

    // Initialize a uniform distribution over the message alphabet for each
    // input symbol.
    let mut mu = (0..n_input)
        .map(|_| {
            make_uniform(
                message_space_size,
                Some(message_space_labels.to_vec()),
            )
        })
        .collect::<Vec<_>>();
    let mut done = false;
    let mut output = vec![];

    let mut itercount = 0;

    if let Some(logger) = logger {
        info!(
            logger,
            "--------- start encode, |input alphabet| = {}", message_space_size
        );
    }

    if let Some(tx) = tx {
        tx.send(DataOfInterest::EncodeStart())?;
    }

    log_mu_table(tx, &mu);

    while !done {
        if let Some(logger) = logger {
            info!(logger, "--------- encode iteration {}", itercount);
        }
        if let Some(tx) = tx {
            tx.send(DataOfInterest::EncodeIteration(itercount))?;
        }

        done = encode_step(
            rng,
            &mut mu,
            cnext,
            xs,
            &mut output,
            message_space_labels,
            should_stop,
            logger,
            tx,
        )
        .await?;

        itercount += 1;
    }

    // Fix: the former `tx.map(|tx| tx.send(..))` silently discarded the send
    // `Result`; propagate it for consistency with every other send above.
    if let Some(tx) = tx {
        tx.send(DataOfInterest::EncodeResult(format!("{:?}", output)))?;
    }

    Ok(output)
}

/// Run a single encoding iteration; returns `Ok(true)` once the stopping
/// criterion is satisfied, `Ok(false)` to continue.
///
/// Steps:
/// 1. pick `istar`, the input position whose distribution `mu[istar]` to
///    refine next (selection policy lives in `pick_istar` — presumably the
///    highest-entropy position; confirm there);
/// 2. fetch the provider's next-token distribution and build the sparse
///    minimum-entropy joint distribution `gammaj` between it and
///    `mu[istar]`. `swap_p_q` records whether that routine swapped its two
///    marginals, which flips the meaning of the index pairs in `gammaj`;
/// 3. restrict `gammaj` to the entries matching the symbol to encode,
///    `xs[istar]`, and sample an output-token index `j_best` from the
///    surviving joint weights;
/// 4. emit the corresponding token, advance the provider, and replace
///    `mu[istar]` with the normalized conditional distribution given
///    `j_best`, re-sorted into non-increasing order.
async fn encode_step<A: Clone + Display + Eq + Debug, W: Clone + Debug, V>(
    rng: &mut ChaCha8Rng,
    mu: &mut [DiscreteDistribution<A>],
    cnext: &mut impl NextTokenDistributionProvider<W, V>,
    xs: &[A],
    output: &mut Vec<W>,
    message_space_labels: &[A],
    should_stop: impl Fn(&[DiscreteDistribution<A>]) -> bool,
    logger: Option<&slog::Logger>,
    tx: Option<&UnboundedSender<DataOfInterest>>,
) -> Result<bool, anyhow::Error> {
    let message_space_size = message_space_labels.len();

    let istar = pick_istar(mu);

    if let Some(logger) = logger {
        info!(logger, "picking istar={}", istar);
    }

    // Fix: was `tx.map(|tx| tx.send(..))`, which dropped the send `Result`;
    // propagate it like the other diagnostic sends in this function.
    if let Some(tx) = tx {
        tx.send(DataOfInterest::IStar(istar))?;
    }

    // The next-token distribution is sorted into descending order before the
    // coupling is computed.
    let mut next_token_distribution = cnext.get().await?;
    order_descending(&mut next_token_distribution);

    let n = next_token_distribution.d.len();

    if let Some(logger) = logger {
        info!(logger, "computing MEJD");
        info!(
            logger,
            "n={}, next token distro C(C_j | ...) = {:?}",
            n,
            next_token_distribution.clone()
        );
        info!(logger, "mu distro = {:?}", mu[istar]);
    }

    // `gammaj` maps joint probabilities to index pairs; `swap_p_q` tells us
    // which component of each pair refers to the token axis vs. the message
    // axis (the coupling routine may internally swap its two arguments).
    let (gammaj, swap_p_q) =
        min_entropy_joint_distribution_sparse(next_token_distribution.d, mu[istar].d.clone());

    if let Some(logger) = logger {
        debug!(logger, "gammaj: {:?}, swap={}", gammaj, swap_p_q);
        debug!(logger, "marginals = {:?}", compute_marginals(&gammaj));
    }

    if let Some(tx) = tx {
        tx.send(DataOfInterest::GammaTable(gammaj.clone()))?;
    }

    let i_to_encode = xs[istar].clone();

    if let Some(logger) = logger {
        info!(
            logger,
            "symbol to encode: {}, istar = {}",
            i_to_encode,
            istar // , input alphabet size: {} , message_space_size
        );
        info!(logger, "mu[istar] = {:?}", mu[istar]);
        info!(logger, "{}, {:?}", swap_p_q, gammaj.iter());
    }

    // Keep only the joint entries whose message-axis label equals the symbol
    // we want to encode; the surviving token-axis indices (with their joint
    // probabilities) become the sampling weights for the next output token.
    let weights_map: BTreeMap<usize, f64> = BTreeMap::from_iter(
        gammaj
            .iter()
            .filter(|(_, (i, iprime))| {
                mu[istar]
                    .labels
                    .as_ref()
                    .unwrap()
                    .get(*(if swap_p_q { i } else { iprime }))
                    .cloned()
                    == Some(i_to_encode.clone())
            })
            .map(|(x, (jprime, j))| (*(if swap_p_q { j } else { jprime }), *x)),
    );

    if let Some(logger) = logger {
        info!(
            logger,
            "weights_map for next output symbol distro: {:?}", weights_map
        );
    }

    // Dense weight vector over all n token indices; missing entries weigh 0.
    let weights = (0..n).map(|j| weights_map.get(&j).unwrap_or(&0.0));

    if let Some(logger) = logger {
        info!(logger, "weights: {:?}", weights.clone().collect::<Vec<_>>());
    }

    // No need to normalize manually: `WeightedIndex` handles unnormalized
    // weights (and errors out if they are all zero).
    let j_best = WeightedIndex::new(weights)?.sample(rng);

    let sj = &next_token_distribution.labels.unwrap()[j_best];

    if let Some(logger) = logger {
        info!(logger, "outputting value j_best={} sj={:?}", j_best, sj);
    }

    output.push(sj.clone());

    // Important: advance the next-token-distribution generator past `sj`!
    // (TODO maybe rename the method)
    cnext.select(sj.clone()).await?;

    // Column of `gammaj` at the chosen token index: the unnormalized
    // conditional distribution over message symbols given `j_best`.
    let weights_map_column = BTreeMap::from_iter(
        gammaj
            .iter()
            .filter(|(_, (jprime, j))| *(if !swap_p_q { jprime } else { j }) == j_best)
            .map(|(x, (i, iprime))| (*(if !swap_p_q { iprime } else { i }), *x)),
    );

    if let Some(logger) = logger {
        info!(logger, "wm_column for updated_mu #{:?}", weights_map_column);
    }

    // Densify and normalize to obtain the conditional probability mu(. | j_best).
    // (Previously this built an intermediate DiscreteDistribution with a dead
    // index pairing and then a second, normalized copy.)
    let column: Vec<f64> = (0..message_space_size)
        .map(|i| *weights_map_column.get(&i).unwrap_or(&0.0))
        .collect();
    let sum: f64 = column.iter().sum();
    let mut updated_mu = DiscreteDistribution {
        d: column.iter().map(|x| x / sum).collect::<Vec<_>>(),
        labels: mu[istar].labels.clone(),
    };

    if let Some(logger) = logger {
        info!(
            logger,
            "entropy of updated mu[{}] = {}",
            istar,
            updated_mu.d.entropy()
        );
    }

    // Re-sort into non-increasing probability order (labels permuted to
    // match), as the rest of the pipeline expects.
    let mut indices = (0..updated_mu.d.len()).collect::<Vec<_>>();
    indices.sort_by_key(|&i| std::cmp::Reverse(NotNan::new(updated_mu.d[i]).unwrap()));

    if let Some(logger) = logger {
        debug!(logger, "indices sorted by key = {:?}", indices);
    }

    updated_mu.labels = updated_mu
        .labels
        .map(|v| indices.iter().map(|&i| v[i].clone()).collect::<Vec<_>>());

    updated_mu.d = indices.iter().map(|&i| updated_mu.d[i]).collect();

    mu[istar] = updated_mu;

    log_mu_table(tx, mu);

    if should_stop(mu) {
        if let Some(logger) = logger {
            info!(logger, "stopping criterion reached");
        }
        return Ok(true);
    }
    Ok(false)
}
