use std::fmt::Debug;
use std::rc::Rc;
use num_traits::Num;
use crate::autograd::function::{ForwardArgs, Function};
use crate::autograd::function_ctx::FunctionCtx;
use crate::misc::shape_to_strides;
use crate::Tensor;

/// Autograd node for a tensor view/reshape operation.
///
/// `forward` saves the input's original shape into the context (see the
/// `Function` impl below); `backward` uses that saved shape to view the
/// incoming gradient back to the input's layout.
pub struct ViewBackward<T> {
    // Shared autograd bookkeeping; the saved original shape lives here.
    ctx: FunctionCtx<T>,
}

impl<T> Function<T> for ViewBackward<T>
    where
        T: Copy + Num + 'static,
{
    fn new(ctx: FunctionCtx<T>) -> Self {
        Self { ctx }
    }

    fn ctx(&self) -> &FunctionCtx<T> {
        &self.ctx
    }

    fn forward(ctx: &mut FunctionCtx<T>, args: ForwardArgs<T>) -> Tensor<T> {
        if let ForwardArgs::TensorOther(tensor, shape) = args {
            ctx.save_others([tensor.shape().as_ref()].into());

            let numel = tensor.numel();
            assert_eq!(
                numel, shape.iter().product(),
                "shape '{:?}' is invalid for input of size {}",
                shape, numel);

            let data = if tensor.is_contiguous() {
                Rc::clone(&(*tensor.ptr).borrow().data)
            } else {
                tensor
                    .flat_iter()
                    .collect::<Box<[T]>>()
                    .into()
            };

            let strides = if data.len() == 1 {
                shape.iter().map(|_| 0).collect()
            } else {
                shape_to_strides(shape)
            };

            let shape = shape
                .to_vec()
                .into_boxed_slice();

            (data, tensor.offset(), shape, strides).into()
        } else {
            unreachable!()
        }
    }

    fn backward(&self, grad_output: Tensor<T>) -> Vec<Option<Tensor<T>>> {
        let shape = self.ctx.others()[0];
        [grad_output.view(shape).into()].into()
    }
}
