#include <torch/torch.h>
#include <iostream>
using namespace std;

// Walks through the basic libtorch autograd workflow: tracked tensors,
// grad_fn inspection, backward(), requires_grad_ toggling, NoGradGuard,
// detach(), and a Jacobian-vector product. Prints each step to stdout.
// Always returns 0.
int basic_autograd_operations(){
    /*
     * 1. Create a tensor with torch::requires_grad(true) so that
     *    autograd records every operation performed on it.
     */
    torch::Tensor x = torch::ones({2,2}, 
            torch::requires_grad(true));
    cout << "x:\n" << x << endl;

    /*
     * 2. Perform a tensor operation. Because y is created as the
     *    result of an operation on a tracked tensor, it carries a
     *    grad_fn() (the backward node that produced it).
     * */
    torch::Tensor y = x.add(2);
    cout << "y:\n" << y << endl;
    cout << "y.grad_fn()->name():\n" << y.grad_fn()->name() << endl;

    torch::Tensor z = y*y*3;
    cout << "z:\n" << z << endl;
    cout << "z.grad_fn()->name:\n" << z.grad_fn()->name() << endl;

    torch::Tensor out = z.mean();  // mean of all elements of z
    cout << "out:\n" << out << endl;
    cout << "out.grad_fn()->name:\n" << out.grad_fn()->name() << endl;

    /*
     * 3. Run backpropagation. Because "out" holds a single scalar,
     *    out.backward() is equivalent to out.backward(torch::tensor(1.)).
     *    Note: by default the computation graph is freed after
     *    backward() runs, so it may only be called once per forward
     *    pass. To call it again, retain the graph explicitly:
     *    out.backward({}, true);   // gradient={}, retain_graph=true
     * */
    out.backward();

    /*
     * 4. Read the accumulated gradient d(out)/dx.
     * */
    cout << "x.grad():\n" << x.grad() << endl;

    /*
     * 5. Use .requires_grad_() to change the `requires_grad`
     *    flag of an existing tensor in place.
     * */
     torch::Tensor a = torch::randn({2,2});
     a = ( (a * 3) / (a-1) );
     cout << "a.requires_grad(): " << a.requires_grad() << endl;
     a.requires_grad_(true);
     cout << "a.requires_grad(): " << a.requires_grad() << endl;

     torch::Tensor b = (a * a).sum();
     cout << "b.grad_fn()->name(): " << b.grad_fn()->name() << endl;

     /*
      * 6. Inside a scope guarded by torch::NoGradGuard, autograd
      *    tracking is disabled: results of tensor operations report
      *    requires_grad() == false (printed as 0).
      * */
     cout << "x.requires_grad: " << x.requires_grad() << endl;
     cout << "x.pow(2).requires_grad: " << x.pow(2).requires_grad() << endl;
     {
         torch::NoGradGuard no_grad;
         cout << "x.pow(2).requires_grad: " << x.pow(2).requires_grad() << endl;
     }

     /*
      * 7. Alternatively, use .detach() to get a new tensor with the
      *    same values but detached from the computation graph
      *    (requires_grad() is false on the result).
      * */
     cout << "x.requires_grad(): " << x.requires_grad() << endl;
     y = x.detach();
     cout << "y.requires_grad(): " << y.requires_grad() << endl;
     cout << x.eq(y).all().item<bool>() << endl;  // detached copy keeps the same values

     /*
      * 8. Jacobian-vector product example: for a non-scalar output,
      *    pass a vector v to backward() to compute J^T * v instead of
      *    a full Jacobian.
      * */
     x = torch::randn(3, torch::requires_grad(true));
     y = x * 2;
     // Keep doubling until the L2 norm exceeds 1000 (y = x * 2^k).
     while(y.norm().item<double>() < 1000){
         y = y * 2;
     }
     cout << "y: " << y << endl;
     cout << "y.grad_fn()->name(): " << y.grad_fn()->name() << endl;
     torch::Tensor v = torch::tensor({0.1, 1.0, 0.0001}, torch::kFloat);
     y.backward(v);  // computes v^T * J, accumulated into x.grad()
     cout << "(Jacobian)x.grad(): \n" << x.grad() << endl;

     return 0;
}

// Entry point: runs the autograd walkthrough and propagates its
// status code (always 0) as the process exit code.
int main(){
    const int status = basic_autograd_operations();
    return status;
}
