//
// Created by sunmy on 2020/10/17.
//

#ifndef HW1_CONJUGATE_GRADIENT_H
#define HW1_CONJUGATE_GRADIENT_H

#include <iostream>
#include <limits>

#include "Eigen/Core"

/// Solve H x = b with the conjugate gradient method, where H is the n-by-n
/// Hilbert matrix H(i, j) = 1 / (i + j + 1) and b = H * ones, so the exact
/// solution is the all-ones vector. Prints the computed solution to stdout.
///
/// @tparam n          problem size (dimension of the Hilbert matrix)
/// @tparam T          scalar type used for all arithmetic
/// @param max_error   stop once the step size ||alpha * p|| falls below this
template <int n = 10, typename T = long double>
void conjugate_gradient_impl(T max_error = 0.00001)
{
    using namespace Eigen;
    using namespace std;
    using MatrixND = Matrix<T, n, n>;
    using VectorND = Matrix<T, n, 1>;

    // Build the Hilbert matrix (symmetric positive definite, so CG applies).
    MatrixND H;
    for (auto i = 0; i < n; ++i)
    {
        for (auto j = 0; j < n; ++j)
        {
            H(i, j) = T(1) / (i + j + 1);
        }
    }

    // Right-hand side chosen so the exact solution is the all-ones vector.
    VectorND b = H * VectorND::Ones();

    // Start from the zero vector; r is the residual b - H x and p is the
    // current search direction. Only the previous and current r/p are kept.
    VectorND x = VectorND::Zero();
    VectorND new_r = b - H * x;
    VectorND new_p = new_r;
    VectorND old_r;
    VectorND old_p;

    T alpha = 0;
    T beta = 0;

    // In exact arithmetic CG converges in at most n iterations.
    for (auto iter_time = 0; iter_time < n; ++iter_time)
    {
        old_r = new_r;
        old_p = new_p;

        // Optimal step length along the search direction.
        alpha = old_r.squaredNorm() / (H * old_p).dot(old_p);
        x += alpha * old_p;

        // Converged: the step just taken is smaller than the tolerance.
        if (old_p.norm() * alpha < max_error)
            break;

        // Do not compute the next residual/direction on the last iteration.
        if (iter_time == n - 1)
            break;

        new_r = old_r - alpha * H * old_p;
        beta = new_r.squaredNorm() / old_r.squaredNorm();
        new_p = new_r + beta * old_p;

        // Guard against dividing by (near-)zero norms in the next iteration.
        if (new_r.norm() <= std::numeric_limits<T>::min() ||
            new_p.norm() <= std::numeric_limits<T>::min())
            break;
    }

    // Print with 12 significant digits, then restore the previous precision
    // so this function does not leak modified global stream state.
    const auto old_precision = cout.precision(12);
    cout << "The solution for n = " << n << " is:" << endl
         << x << endl;
    cout.precision(old_precision);
}

/// Run the conjugate-gradient Hilbert solver for sizes n = 10..15 and print
/// each solution. Marked `inline` because this is a header: without it,
/// including the header from two translation units violates the
/// one-definition rule and fails at link time.
inline void conjugate_gradient()
{
    conjugate_gradient_impl<10>();
    conjugate_gradient_impl<11>();
    conjugate_gradient_impl<12>();
    conjugate_gradient_impl<13>();
    conjugate_gradient_impl<14>();
    conjugate_gradient_impl<15>();
}

#endif //HW1_CONJUGATE_GRADIENT_H
