use std::fmt::Debug;

use linfa::prelude::*;
use linfa_datasets::iris;
use ndarray::{prelude::*, Data, RawData};
use ndarray_linalg::Scalar;
use plotters::prelude::*;

fn main() {
    // Fit a library decision tree on iris and plot the samples.
    test_tree();
    // Manual impurity-driven split demo; enable to compare with the library tree.
    //my_test_tree();
}

/// Plots the last two iris feature columns as an SVG scatter plot, then fits
/// a depth-5 `linfa` decision tree and prints its confusion matrix and MCC.
fn test_tree() {
    // Load iris and keep only the last two feature columns.
    let iris_data = iris();
    let (records, targets) = (iris_data.records, iris_data.targets);
    let features = records.slice(s![.., 2..]);

    // Axis bounds for the plot, via the local MinMax trait.
    let col0 = features.column(0);
    let col1 = features.column(1);
    let x_max = col0.max().unwrap();
    let x_min = col0.min().unwrap();
    let y_max = col1.max().unwrap();
    let y_min = col1.min().unwrap();

    let root = SVGBackend::new("./examples/tree/data.svg", (600, 400)).into_drawing_area();
    root.fill(&WHITE).unwrap();
    let mut chart = ChartBuilder::on(&root)
        .set_all_label_area_size(40)
        .build_cartesian_2d(x_min..x_max, y_min..y_max)
        .unwrap();

    // One colored circle per sample, colored by class label.
    let points = col0.iter().zip(col1.iter()).zip(targets.iter());
    chart
        .draw_series(points.map(|((px, py), class)| {
            let color = match *class {
                0 => RED.filled(),
                1 => BLUE.filled(),
                _ => CYAN.filled(),
            };
            Circle::new((*px, *py), 4, color)
        }))
        .unwrap();
    chart.configure_mesh().draw().unwrap();

    let dataset = DatasetBase::new(features, targets.view());
    let model = linfa_trees::DecisionTree::params()
        .max_depth(Some(5))
        .fit(&dataset)
        .unwrap();

    println!("{:?}", model);
    let confusion = model.predict(&dataset).confusion_matrix(&dataset).unwrap();
    println!("{:?}", confusion);
    println!("{}", confusion.mcc());
}

fn my_test_tree() {
    let (x, y) = {
        let i: DatasetBase<
            ArrayBase<ndarray::OwnedRepr<f64>, Dim<[usize; 2]>>,
            ArrayBase<ndarray::OwnedRepr<usize>, Dim<[usize; 1]>>,
        > = iris();
        (i.records, i.targets)
    };
    let x = x.slice(s![.., 2..]);
    let ret = try_split(x, y.view());
    println!("{:?}", ret);

    let (x_l, x_r, y_l, y_r) = split(x, y.view(), ret.1, ret.2);
    println!("{}", entropy(y_l.view()));
    println!("{}", entropy(y_r.view()));
    // y_l的信息熵是0了，达到了最小，现在对y_r进行再次划分
    let ret = try_split(x_r.view(), y_r.view());
    println!("{:?}", ret);

    let (x_l, x_r, y_l, y_r) = split(x_r.view(), y_r.view(), ret.1, ret.2);
    println!("{}", entropy(y_l.view())); //比较小了
    println!("{}", entropy(y_r.view())); // 比较小了
}
// 模拟信息熵进行划分
fn split(
    x: ArrayView2<f64>,
    y: ArrayView1<usize>,
    d: usize,
    value: f64,
) -> (Array2<f64>, Array2<f64>, Array1<usize>, Array1<usize>) {
    assert!(d < x.ncols());
    let column = x.column(d);
    let index_a = get_index_condition(column, value, Operation::LESSEQ);
    let index_b = get_index_condition(column, value, Operation::GT);
    let mut x_left = Array2::zeros((0, 2));
    let mut x_right = Array2::zeros((0, 2));
    let mut y_left = vec![];
    let mut y_right: Vec<usize> = vec![];
    for (index, v) in column.iter().enumerate() {
        for i in &index_a {
            if index == *i {
                x_left.push_row(x.row(index)).unwrap();
                break;
            }
        }

        for i in &index_b {
            if index == *i {
                x_right.push_row(x.row(index)).unwrap();
                break;
            }
        }
    }

    for (index, v) in y.iter().enumerate() {
        for i in &index_a {
            if index == *i {
                y_left.push(*v);
                break;
            }
        }

        for i in &index_b {
            if index == *i {
                y_right.push(*v);
                break;
            }
        }
    }
    (
        x_left,
        x_right,
        Array::from_vec(y_left),
        Array::from_vec(y_right),
    )
}

// 计算信息熵
#[cfg(feature = "entropy")]
fn entropy(y: ArrayView1<usize>) -> f64 {
    let ret = y.classification();
    let sum = y.len();
    let mut res = 0.0;
    for v in ret {
        let p = v as f64 / sum as f64;
        res += -p * p.ln();
    }
    res
}

// Gini impurity of a label vector: 1 - sum_i p_i^2.
// Selected instead of Shannon entropy via the "gini" feature.
#[cfg(feature = "gini")]
fn entropy(y: ArrayView1<usize>) -> f64 {
    let counts = y.classification();
    let total = y.len() as f64;
    counts
        .into_iter()
        .fold(1.0, |acc, c| acc - (c as f64 / total).square())
}

/// Exhaustively searches every feature `d` and every midpoint between two
/// consecutive distinct sorted values for the split minimising the summed
/// impurity of the two halves.
///
/// Returns `(best_impurity, best_d, best_v)`; `best_v` stays at the sentinel
/// `-1.0` when no valid split exists (all rows identical in every feature).
fn try_split(x: ArrayView2<f64>, y: ArrayView1<usize>) -> (f64, usize, f64) {
    // FIX: the best score used to start at the magic value 0.99, which
    // silently rejected every split whose impurity was >= 0.99 and returned
    // the sentinel (-1.0) even though real splits existed. Starting at
    // infinity always accepts the best real split. `best_d` now starts at a
    // valid 0 instead of the arbitrary 1.
    let mut best_entropy = f64::INFINITY;
    let mut best_d = 0;
    let mut best_v = -1.0;
    for d in 0..x.ncols() {
        // Sort the candidate column ascending, keeping original row indices.
        let column = x.column(d);
        let sorted = column.sort(true);
        for i in 1..x.len_of(Axis(0)) {
            // Only a gap between two *distinct* neighbours defines a threshold.
            if *sorted[i - 1].1 != *sorted[i].1 {
                let v = (*sorted[i - 1].1 + *sorted[i].1) / 2.0;
                let (_x_left, _x_right, y_left, y_right) = split(x, y, d, v);
                let e = entropy(y_left.view()) + entropy(y_right.view());
                if e < best_entropy {
                    best_entropy = e;
                    best_d = d;
                    best_v = v;
                }
            }
        }
    }
    (best_entropy, best_d, best_v)
}

/// Minimum/maximum over all elements of a collection.
trait MinMax<A> {
    /// Smallest element, or `None` if the collection is empty.
    fn min(&self) -> Option<A>;
    /// Largest element, or `None` if the collection is empty.
    fn max(&self) -> Option<A>;
}

impl<A, S, D> MinMax<A> for ArrayBase<S, D>
where
    S: RawData<Elem = A> + Data,
    D: Dimension,
    A: PartialOrd + Copy,
{
    /// Folds over all elements, keeping the smallest seen so far.
    /// NOTE(review): with `<` on partially-ordered types, incomparable
    /// elements (e.g. NaN) are simply skipped, never selected.
    fn min(&self) -> Option<A> {
        let seed = *self.first()?;
        Some(self.fold(seed, |best, &v| if v < best { v } else { best }))
    }

    /// Folds over all elements, keeping the largest seen so far.
    fn max(&self) -> Option<A> {
        let seed = *self.first()?;
        Some(self.fold(seed, |best, &v| if v > best { v } else { best }))
    }
}

// Sort the elements while remembering each element's original index.
trait Sort<A> {
    /// Returns `(original_index, &value)` pairs ordered by value —
    /// ascending when `ascent` is true, descending otherwise.
    fn sort(&self, ascent: bool) -> Vec<(usize, &A)>;
}

impl<A, S> Sort<A> for ArrayBase<S, Ix1>
where
    S: RawData<Elem = A> + Data,
    A: PartialOrd,
{
    /// Stable sort via the standard library (the original comment claimed
    /// bubble sort, but `sort_by` is used). Panics if any two elements are
    /// incomparable (e.g. NaN), because `partial_cmp(..).unwrap()` is used.
    fn sort(&self, ascent: bool) -> Vec<(usize, &A)> {
        let mut pairs: Vec<(usize, &A)> = self.iter().enumerate().collect();
        pairs.sort_by(|lhs, rhs| {
            // Swap comparison operands to flip the ordering direction.
            let (a, b) = if ascent { (lhs, rhs) } else { (rhs, lhs) };
            a.1.partial_cmp(b.1).unwrap()
        });
        pairs
    }
}

/// Comparison used by `get_index_condition` to select indices.
/// NOTE(review): variant names should be UpperCamelCase (`LessEq`, `Gt`)
/// per Rust convention; left unchanged to avoid touching call sites.
#[derive(Clone, Copy, Debug)]
enum Operation {
    // Selects elements with `x <= value`.
    LESSEQ,
    // Selects elements with `x > value`.
    GT,
}

/// Indices of the elements of `data` that satisfy `op` against `value`
/// (`LESSEQ`: x <= value; `GT`: x > value).
fn get_index_condition(data: ArrayView1<f64>, value: f64, op: Operation) -> Vec<usize> {
    // Single iterator chain; the comparison itself is chosen per element
    // instead of duplicating the whole pipeline per operation.
    data.iter()
        .enumerate()
        .filter(|&(_, &x)| match op {
            Operation::LESSEQ => x <= value,
            Operation::GT => x > value,
        })
        .map(|(index, _)| index)
        .collect()
}

/// Count how many times each distinct value occurs in the collection.
trait Classification<A> {
    /// Returns one count per distinct value (ordered by ascending value in
    /// the provided impl, which sorts before run-length counting).
    fn classification(&self) -> Vec<usize>;
}

impl<A, S> Classification<A> for ArrayBase<S, Ix1>
where
    S: RawData<Elem = A> + Data,
    A: PartialOrd + Clone + Debug,
{
    /// Counts occurrences of each distinct value: sorts a borrowed copy of
    /// the elements, then run-length encodes the sorted sequence. Returns
    /// one count per distinct value, ordered by ascending value.
    ///
    /// # Panics
    /// Panics on an empty array, or if two elements are incomparable
    /// (e.g. NaN), since sorting uses `partial_cmp(..).unwrap()`.
    fn classification(&self) -> Vec<usize> {
        assert!(self.len() > 0);
        let mut data = self.iter().collect::<Vec<_>>();
        data.sort_by(|v1, v2| v1.partial_cmp(v2).unwrap());

        // BUG FIX: seed the run with the first element of the *sorted* data.
        // The old code seeded with `self.first()` (the unsorted first
        // element); whenever that element was not the minimum, the very first
        // comparison failed and a spurious 0 was pushed — e.g. [2, 1, 1]
        // produced [0, 2, 1] instead of [2, 1], which in turn made the
        // entropy computation yield NaN (0 * ln(0)).
        let mut ret = vec![];
        let mut top = data[0];
        let mut count = 0;
        for d in data {
            if *top == *d {
                count += 1;
            } else {
                ret.push(count);
                top = d;
                count = 1;
            }
        }
        ret.push(count);
        ret
    }
}
