pub mod from;
pub mod tensor_cell;

use std::cell::RefCell;
use std::fmt::{Debug, Formatter};
use std::iter::zip;
use std::rc::Rc;
use itertools::Itertools;
use num_traits::Num;
use crate::autograd::expand_backward::ExpandBackward;
use crate::autograd::function::ForwardArgs::{TensorOther, TensorTensor};
use crate::autograd::function::Function;
use crate::autograd::mm_backward::MmBackward;
use crate::autograd::permute_backward::PermuteBackward;
use crate::autograd::pow_backward::PowBackward;
use crate::autograd::select_backward::SelectBackward;
use crate::autograd::view_backward::ViewBackward;
use crate::misc::broadcast_shapes;
use crate::tensor::tensor_cell::TensorCell;

/// A cheaply clonable handle to shared tensor state.
///
/// Cloning a `Tensor` clones only the `Rc`, so all clones alias the same
/// underlying `TensorCell` (data buffer, offset, shape, strides, grad state).
/// `Rc<RefCell<..>>` makes this a single-threaded type with interior
/// mutability; overlapping `borrow`/`borrow_mut` calls panic at runtime.
#[derive(Clone)]
pub struct Tensor<T> {
    // Shared, interiorly-mutable storage cell; crate-visible so autograd
    // nodes and the `from` conversions can construct/inspect it directly.
    pub(crate) ptr: Rc<RefCell<TensorCell<T>>>,
}

impl<T> Tensor<T>
    where
        T: Copy + Num + 'static,
{
    /// Builds a rank-0 (scalar) tensor holding `value`.
    ///
    /// The backing buffer contains exactly one element, the offset is 0,
    /// and both shape and strides are empty, matching the convention used
    /// throughout this module for zero-dimensional tensors.
    pub fn scalar(value: T) -> Self {
        let buffer: Box<[T]> = [value].into();
        (Rc::new(buffer), 0, [].into(), [].into()).into()
    }
}

impl<T> Tensor<T>
    where
        T: Copy + Num + 'static,
{
    /// Broadcasts `self` to the shape of `other`. See [`Tensor::expand`].
    pub fn expand_as(&self, other: Self) -> Self {
        self.expand(other.shape().as_ref())
    }

    /// Collapses the tensor into a rank-1 view with `numel()` elements.
    pub fn flatten(&self) -> Self {
        self.reshape(&[self.numel()])
    }

    /// Broadcasts the tensor to `shape`, recording an `ExpandBackward`
    /// autograd node. When the shape already matches, returns a cheap
    /// aliasing clone and records no node.
    pub fn expand(&self, shape: &[usize]) -> Self {
        if self.shape().as_ref() == shape {
            self.clone()
        } else {
            ExpandBackward::<T>::apply(TensorOther(self, shape))
        }
    }

    /// Selects a sub-tensor at `index` (autograd-aware via `SelectBackward`).
    pub fn index(&self, index: &[usize]) -> Self {
        SelectBackward::<T>::apply(TensorOther(self, index))
    }

    /// Matrix product of `self` and `other` (autograd-aware).
    pub fn mm(&self, other: &Self) -> Self {
        MmBackward::<T>::apply(TensorTensor(self, other))
    }

    /// Reorders the dimensions according to `axis` (autograd-aware).
    pub fn permute(&self, axis: &[usize]) -> Self {
        PermuteBackward::<T>::apply(TensorOther(self, axis))
    }

    /// Raises every element to the integer power `exp` (autograd-aware).
    pub fn pow(&self, exp: usize) -> Self {
        PowBackward::<T>::apply(TensorOther(self, &[exp]))
    }

    /// Alias for [`Tensor::view`], kept for API familiarity.
    pub fn reshape(&self, shape: &[usize]) -> Self {
        self.view(shape)
    }

    /// Reinterprets the tensor with a new `shape`, recording a
    /// `ViewBackward` autograd node.
    pub fn view(&self, shape: &[usize]) -> Self {
        ViewBackward::<T>::apply(TensorOther(self, shape))
    }

    /// Shared handle to the raw backing buffer (cheap `Rc` clone; the
    /// buffer may hold more elements than this view exposes).
    pub fn data(&self) -> Rc<Box<[T]>> {
        Rc::clone(&(*self.ptr).borrow().data)
    }

    /// The accumulated gradient, if one has been recorded.
    pub fn grad(&self) -> Option<Tensor<T>> {
        (*self.ptr).borrow().grad.clone()
    }

    /// Total number of elements: the product of the shape (1 for rank 0).
    pub fn numel(&self) -> usize {
        (*self.ptr).borrow().shape.iter().product()
    }

    /// Offset of this view's first element within the backing buffer.
    pub fn offset(&self) -> usize {
        (*self.ptr).borrow().offset
    }

    /// Whether autograd tracks operations on this tensor.
    pub fn requires_grad(&self) -> bool {
        (*self.ptr).borrow().requires_grad
    }

    /// Enables or disables gradient tracking in place.
    pub fn requires_grad_(&self, requires_grad: bool) {
        (*self.ptr).borrow_mut().requires_grad = requires_grad;
    }

    /// Whether this (non-leaf) tensor keeps its gradient after backward.
    pub fn retains_grad(&self) -> bool {
        self.ptr.as_ref().borrow().retains_grad
    }

    /// Requests that `.grad` be populated for this tensor during backward.
    ///
    /// # Panics
    /// Panics when `requires_grad` is `false`.
    pub fn retain_grad(&self) {
        assert!(self.requires_grad(), "can't retain_grad on Tensor that has requires_grad=False");
        (*self.ptr).borrow_mut().retains_grad = true;
    }

    /// Alias for [`Tensor::shape`].
    pub fn size(&self) -> Box<[usize]> {
        self.shape()
    }

    /// Owned copy of the tensor's dimensions.
    pub fn shape(&self) -> Box<[usize]> {
        (*self.ptr).borrow().shape.clone()
    }

    /// Owned copy of the per-dimension element strides.
    pub fn stride(&self) -> Box<[usize]> {
        (*self.ptr).borrow().strides.clone()
    }

    /// Flat index of the maximum element (last occurrence on ties),
    /// returned as a scalar `usize` tensor.
    ///
    /// # Panics
    /// Panics when any comparison is unordered (e.g. a NaN element).
    pub fn argmax(&self) -> Tensor<usize>
        where
            T: PartialOrd,
    {
        // A rank-0 tensor has its single element at flat index 0. Guard
        // explicitly: `flat_iter` yields nothing for an empty shape, and
        // `max_by(..).unwrap()` would panic on the empty iterator.
        if self.shape().is_empty() {
            return Tensor::scalar(0);
        }
        let idx = self.flat_iter()
            .enumerate()
            .max_by(|x, y| x.1.partial_cmp(&y.1).unwrap())
            .map(|x| x.0)
            .unwrap();
        Tensor::scalar(idx)
    }

    /// Runs the backward pass seeded with `grad_output`.
    ///
    /// Leaf tensors (no `grad_fn`) and tensors flagged via `retain_grad`
    /// accumulate into `.grad`; intermediates only forward the gradient
    /// to their `grad_fn`.
    ///
    /// # Panics
    /// Panics when `requires_grad` is `false` or when `grad_output`'s
    /// shape differs from this tensor's shape.
    pub fn backward(&self, grad_output: Tensor<T>) {
        // Read the incoming shape BEFORE mutably borrowing `self.ptr`,
        // so this does not hit a RefCell double-borrow panic when
        // `grad_output` aliases `self`.
        let grad_shape = grad_output.shape();
        let mut cell = (*self.ptr).borrow_mut();

        assert!(cell.requires_grad, "tensor does not require grad and does not have a grad_fn");
        assert_eq!(
            cell.shape, grad_shape,
            "grad_output has a shape of {:?} and tensor has a shape of {:?}",
            grad_shape, cell.shape);

        if (cell.requires_grad && cell.grad_fn.is_none()) || cell.retains_grad {
            // Accumulate (PyTorch-style `+=`) rather than overwrite, so
            // repeated backward calls sum their contributions.
            let grad = match &cell.grad {
                Some(grad) => Some(grad + &grad_output),
                None => Some(grad_output.clone()),
            };
            cell.grad = grad;
        }

        if let Some(grad_fn) = cell.grad_fn.as_ref() {
            grad_fn.apply_backward(grad_output);
        }
    }

    /// Convenience wrapper: backward with a ones tensor of this shape.
    pub fn backward_ones(&self) where T: Debug {
        let ones = Tensor::scalar(T::one())
            .expand(self.shape().as_ref());
        self.backward(ones)
    }

    /// Alias for [`Tensor::expand`] (NumPy-style naming).
    pub fn broadcast_to(&self, shape: &[usize]) -> Self {
        self.expand(shape)
    }

    /// Returns a tensor with the same contents laid out contiguously in
    /// row-major order; returns a cheap alias when already contiguous.
    pub fn contiguous(&self) -> Self {
        if self.is_contiguous() {
            return self.clone()
        }

        let data = self
            .flat_iter()
            .collect::<Box<[T]>>();

        (data.into(), self.shape()).into()
    }

    /// Iterates the elements visible through this view in row-major
    /// order, resolving each multi-index through the strides and offset
    /// (works for non-contiguous and broadcast views).
    ///
    /// NOTE: yields nothing for a rank-0 tensor, because the cartesian
    /// product over zero dimensions is empty here; callers special-case
    /// the scalar path.
    pub fn flat_iter(&self) -> impl Iterator<Item=T> + '_ {
        // Snapshot the cell state once instead of re-borrowing (and
        // Rc-cloning the buffer) for every element.
        let strides = self.stride();
        let offset = self.offset();
        let data = self.data();
        self.shape().iter()
            .map(|x| 0..*x)
            .multi_cartesian_product()
            .map(move |idx| {
                let flat = idx.iter()
                    .zip(strides.iter())
                    .map(|(a, b)| *a * *b)
                    .sum::<usize>();
                data[flat + offset]
            })
    }

    /// Extracts the value of a one-element tensor.
    ///
    /// # Panics
    /// Panics when the tensor holds more than one element. The check is
    /// on `numel()`, not the buffer length, so a single-element view into
    /// a larger buffer is accepted, while a broadcast (expanded) tensor
    /// with a one-element buffer but `numel() > 1` is rejected.
    pub fn item(&self) -> T {
        assert_eq!(self.numel(), 1, "only one element tensors can be converted to scalars");
        let t = (*self.ptr).borrow();
        t.data[t.offset]
    }

    /// Whether the view is laid out in row-major (C) order: each stride
    /// equals the product of all later dimension sizes. Strides of
    /// size-1 dimensions are irrelevant and ignored.
    pub fn is_contiguous(&self) -> bool {
        let cell = (*self.ptr).borrow();
        let mut expected = 1usize;
        // Walk dims from innermost to outermost, tracking the stride a
        // contiguous layout would require.
        for (&dim, &stride) in zip(cell.shape.iter(), cell.strides.iter()).rev() {
            if dim != 1 {
                if stride != expected {
                    return false;
                }
                expected *= dim;
            }
        }
        true
    }

    /// Applies `op` element-wise over the ENTIRE backing buffer (not just
    /// the elements visible through this view) and returns a new tensor
    /// with the same offset, shape, and strides.
    pub fn map<F>(&self, op: F) -> Self
        where
            F: Fn(T) -> T,
    {
        let cell = (*self.ptr).borrow();
        let data = cell.data
            .iter()
            .map(|x| op(*x))
            .collect::<Box<[T]>>()
            .into();
        (data, cell.offset, cell.shape.clone(), cell.strides.clone()).into()
    }

    /// Transposes the first two dimensions; identity for rank < 2.
    pub fn t(&self) -> Self {
        if self.shape().len() < 2 {
            self.clone()
        } else {
            self.transpose(0, 1)
        }
    }

    /// Swaps dimensions `dim0` and `dim1` via a permute view.
    ///
    /// # Panics
    /// Panics when either dimension index is out of range.
    pub fn transpose(&self, dim0: usize, dim1: usize) -> Self {
        let mut axis = (0..self.shape().len()).collect_vec();
        axis.swap(dim0, dim1);
        self.permute(&axis)
    }

    /// Clears the gradient: sets it to `None`, or to a zero tensor of
    /// this shape when `set_to_none` is `false`.
    pub fn zero_grad(&self, set_to_none: bool) {
        // Build the replacement gradient BEFORE taking the mutable
        // borrow: the zero-tensor branch calls `self.shape()`, which
        // borrows `self.ptr` and would otherwise panic with a RefCell
        // BorrowMutError.
        let grad = if set_to_none {
            None
        } else {
            Tensor::scalar(T::zero())
                .expand(self.shape().as_ref())
                .into()
        };
        (*self.ptr).borrow_mut().grad = grad;
    }

    /// Combines `self` and `other` element-wise with `op`, broadcasting
    /// both operands to a common shape when their shapes differ.
    pub fn zip<F, S>(&self, other: &Tensor<S>, op: F) -> Self
        where
            S: Copy + Num + 'static,
            F: Fn(T, S) -> T,
    {
        let shape = if self.shape() == other.shape() {
            if self.shape().is_empty() {
                // Rank-0 fast path: `flat_iter` cannot enumerate an
                // empty shape, so combine the two scalars directly.
                let value = op(self.item(), other.item());
                return (Rc::new([value].into()), [].into()).into();
            }
            self.shape()
        } else {
            broadcast_shapes(&[&self.shape(), &other.shape()])
        };

        let data = zip(
            self.expand(&shape).flat_iter(), // TODO: dont expand when shapes are equal
            other.expand(&shape).flat_iter()
        )
            .map(|(a, b)| op(a, b))
            .collect::<Box<[T]>>();

        (data.into(), shape).into()
    }
}

impl<T> Debug for Tensor<T>
    where
        T: Debug,
{
    /// Dumps the complete internal state of the backing cell — raw
    /// buffer, view parameters, and autograd flags — for diagnostics.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let inner = self.ptr.borrow();
        let mut out = f.debug_struct("Tensor");
        out.field("data", &*inner.data);
        out.field("offset", &inner.offset);
        out.field("shape", &inner.shape);
        out.field("strides", &inner.strides);
        out.field("grad", &inner.grad);
        out.field("requires_grad", &inner.requires_grad);
        out.finish()
    }
}