import matplotlib.pyplot as plt
from mindspore import numpy as np
# def function_1(x):
#     return .01 * x * x + .1 * x
#
#
# def function_diff(x):
#     return .02 * x + 0.1
#
#
# def numerical_diff(f, x):
#     h = 1e-4
#     return (f(x + h) - f(x - h)) / (2 * h)
#
#
# x = np.arange(-2.0, 20.0, 0.1)
# y = function_1(x)
# # plt.figure()
# plt.xlabel("x")
# plt.ylabel('y')
# plt.title('the geometric meaning of derivatives')
# plt.plot(x, y, label='f(x)')
# plt.plot(x, 0.2 * x - 0.25, label="Tangent line")
# plt.scatter(5, 0.75, color='r', label='tangent point')
# plt.legend()
# plt.show()
#
# dx_5_num = numerical_diff(function_1, 5)
# dx_5_sym = function_diff(5)
# print(dx_5_num, dx_5_sym)
from mindspore import Parameter, Tensor
import mindspore.nn as nn


class Net(nn.Cell):
    """Linear model f(x) = w * x + b whose output is detached from autograd.

    The `stop_gradient` call in `construct` severs the computation graph,
    so any gradient taken through this network evaluates to zero — the
    class exists to demonstrate that behavior.
    """

    def __init__(self):
        # Bug fix: the method was misspelled `__int__`, so it was never
        # invoked and `self.w` / `self.b` did not exist at construct time.
        super().__init__()
        # Bug fix: `name=` belongs to Parameter, not np.array (which would
        # raise TypeError). `[6, 0]` / `[1, 0]` read as typos for the
        # scalars 6.0 / 1.0 used by this single-value demo.
        self.w = Parameter(np.array([6.0]), name='w')
        # b is frozen: requires_grad=False keeps it out of trainable_params().
        self.b = Parameter(np.array([1.0]), name='b', requires_grad=False)

    def construct(self, x):
        """Return w * x + b, detached from the gradient graph."""
        f = self.w * x + self.b
        # stop_gradient blocks back-propagation through f entirely.
        f = ops.stop_gradient(f)
        return f


from mindspore import dtype as mstype
from mindspore import ParameterTuple
from mindspore import ops


class GradNet(nn.Cell):
    """Wrap a network and return the gradients of its output w.r.t. its
    trainable parameters, scaled by a fixed sensitivity of 0.1.
    """

    def __init__(self, net):
        # Bug fix: both the method name and the super() call were
        # misspelled `__int__`, so none of these attributes were ever set.
        super(GradNet, self).__init__()
        self.net = net
        self.params = ParameterTuple(net.trainable_params())
        # Bug fix: get_by_list=True is required for GradOperation to
        # differentiate w.r.t. the weight list passed at call time;
        # without it `self.params` was silently ignored.
        self.grad_op = ops.GradOperation(get_by_list=True, sens_param=True)
        # Sensitivity (upstream gradient) injected into the backward pass.
        self.grad_wrt_output = Tensor([0.1], dtype=mstype.float32)

    def construct(self, x):
        gradient_function = self.grad_op(self.net, self.params)
        # Bug fix: with sens_param=True the gradient function takes the
        # sensitivity tensor as an extra trailing argument; the original
        # call omitted it (and left self.grad_wrt_output unused).
        return gradient_function(x, self.grad_wrt_output)


# x = Tensor([100], dtype=mstype.float32)  # earlier probe point, kept for reference
# Probe the gradient demo at x = 5 and print the parameter gradients.
x = Tensor([5], dtype=mstype.float32)
grad_net = GradNet(Net())
output = grad_net(x)
print(output)
