use twinkle as tw;

/// Demo driver for the `twinkle` autograd tensor crate: builds small
/// computation graphs and prints forward values and backpropagated gradients.
fn main() {
    // --- Elementwise add with shared operands (diamond-shaped graph) ---
    // `b` and `r1` each feed two downstream nodes, so backward must
    // accumulate their gradients across both paths.
    let a = tw::tensor(&[5., 5., 5., 5.], &[2, 2], true);
    let b = tw::tensor(&[7., 7., 7., 7.], &[2, 2], true);
    println!("a {:?}", a);
    println!("b {:?}", b);

    let r1 = &a + &b;
    let r2 = &r1 + &b;
    let r3 = &r1 + &r2;

    // Graph topology:
    // a    b
    //  \  /\
    //   r1  \
    //  /  \ /
    //  \  r2
    //   \ /
    //    r3

    r3.backward_ones();
    println!("a {:?}", a.grad());
    println!("b {:?}", b.grad());

    // --- Matrix multiply: (2,3) x (3,2) -> (2,2), then backprop ones ---
    let a = tw::tensor(&[6, 5, 4, 11, 12, 13], &[2, 3], true);
    let b = tw::tensor(&[1, 2, 3, 4, 5, 6], &[3, 2], true);
    let mm1 = a.mm(&b);
    println!("{:?}", mm1);
    mm1.backward_ones();

    println!("{:?}", a.grad());
    println!("{:?}", b.grad());
    println!();

    // --- Elementwise power: d(x^2)/dx = 2x ---
    let a = tw::tensor(&[2., 3.], &[2], true);
    let pow1 = a.pow(2);
    pow1.backward_ones();
    println!("{:?}", pow1);
    println!("a {:?}", a.grad());
    println!();

    // --- Reshape: gradient flows back through the shape change.
    // `retain_grad` keeps the gradient on the non-leaf tensor `b`
    // (presumably only leaves store grads by default — mirrors PyTorch).
    let a = tw::tensor(&[1, 2, 3, 4], &[4, 1], true);
    let mut b = a.reshape(&[2, 2]);
    println!("a {:?}", a);
    println!("b {:?}", b);
    b.retain_grad();
    b.backward_ones();
    println!("a {:?}", a.grad());
    println!("b {:?}", b.grad());
    println!();

    // --- Permute: axis reordering changes stride, not data.
    // Backward with an explicit, non-uniform gradient so the inverse
    // permutation of the grad is observable in `a.grad()`.
    let a = tw::rand(&[2, 2, 2, 2], true);
    println!("a {:?} {:?}", a.shape(), a.stride());
    let mut b = a.permute(&[1, 3, 0, 2]);
    println!("b {:?} {:?}", b.shape(), b.stride());
    b.retain_grad();
    b.backward(tw::tensor(&[
        1., 1., 2., 2., 3., 3., 4., 4.,
        5., 5., 6., 6., 7., 7., 8., 8.], &[2, 2, 2, 2], false));
    println!("a {:?}", a.grad());
    println!("b {:?}", b.grad());
    println!();

    // --- Chained view / permute / reshape / index, then backprop through
    // the whole chain. The reshape after a permute must materialize the
    // permuted order (the view is non-contiguous), and `index(&[1])`
    // selects one row so only part of each upstream grad is nonzero.
    let a = tw::tensor(&[1, 2, 3, 4, 5, 6], &[6], true);
    println!(" a {:?} {:?}",  a.flat_iter().collect::<Vec<i32>>(), a.shape());
    let mut a0 = a.view(&[2, 3]);
    a0.retain_grad();
    println!("a0 {:?} {:?}", a0.flat_iter().collect::<Vec<i32>>(), a0.shape());
    let mut a1 = a0.permute(&[1, 0]);
    a1.retain_grad();
    println!("a1 {:?} {:?}", a1.flat_iter().collect::<Vec<i32>>(), a1.shape());
    let mut a2 = a1.reshape(&[3, 2]);
    a2.retain_grad();
    println!("a2 {:?} {:?}", a2.flat_iter().collect::<Vec<i32>>(), a2.shape());
    let mut a3 = a2.index(&[1]);
    a3.retain_grad();
    println!("a3 {:?} {:?}", a3.flat_iter().collect::<Vec<i32>>(), a3.shape());
    a3.backward(tw::tensor(&[7, 8], &[2], false));
    println!(" a {:?}",  a.grad());
    println!("a0 {:?}", a0.grad());
    println!("a1 {:?}", a1.grad());
    println!("a2 {:?}", a2.grad());
    println!("a3 {:?}", a3.grad());
}
