# https://gitee.com/yueyinqiu5990/tj12413601/blob/master/assignment4/question2/torch_plus.py
# Modified from https://gitee.com/yueyinqiu5990/tj12413601/blob/master/assignment4/question1/torch_plus.py
import typing

import torch


def rand(size: int | typing.Sequence[int],
         minimal: float,
         maximal: float,
         random: torch.Generator = None,
         requires_grad: bool = False) -> torch.Tensor:
    length = maximal - minimal
    r = torch.rand(size, generator=random) * length + minimal
    return r.requires_grad_(requires_grad)


def grad(output: torch.Tensor,
         inputs: typing.Sequence[torch.Tensor],
         create_graph: bool = False,
         retain_graph: bool = False) -> typing.Sequence[torch.Tensor]:
    return torch.autograd.grad(output,
                               inputs,
                               torch.ones_like(output),
                               create_graph=create_graph,
                               retain_graph=retain_graph)


def clone_detached(tensor: torch.Tensor,
                   requires_grad: bool = False) -> torch.Tensor:
    """Return an independent copy of ``tensor``, cut off from its graph.

    Args:
        tensor: Source tensor to copy.
        requires_grad: Whether the copy should record gradients as a
            fresh leaf tensor.

    Returns:
        A new tensor with the same values, sharing no storage or autograd
        history with ``tensor``.
    """
    # detach() first so the clone is a graph-free leaf, then copy storage.
    copy = tensor.detach().clone()
    return copy.requires_grad_(requires_grad)


def reshape_as(tensor: torch.Tensor,
               the_template: torch.Tensor,
               detach: bool = False,
               clone: bool = False) -> torch.Tensor:
    """Reshape ``tensor`` to the shape of ``the_template``.

    Args:
        tensor: Tensor to reshape; its element count must match
            ``the_template``'s.
        the_template: Tensor whose shape the result takes; its values are
            never read.
        detach: If True, detach from the autograd graph before reshaping.
        clone: If True, copy the storage before reshaping (applied after
            ``detach`` when both are set).

    Returns:
        ``tensor`` reshaped to ``the_template.shape``.
    """
    if detach:
        tensor = torch.detach(tensor)
    if clone:
        tensor = torch.clone(tensor)
    return torch.reshape(tensor, the_template.shape)


def as_tensor(data: float,
              d_type: torch.dtype = torch.float,
              requires_grad: bool = False) -> torch.Tensor:
    """Wrap a Python scalar in a new tensor.

    Args:
        data: Scalar value to convert.
        d_type: Data type of the result (defaults to ``torch.float``).
        requires_grad: Whether the tensor should record gradients.

    Returns:
        A zero-dimensional tensor holding ``data``.
    """
    return torch.tensor(data, dtype=d_type, requires_grad=requires_grad)
