#include "backendshen/problemshen.h"

#include <algorithm>
#include <cassert>
#include <cmath>
#include <fstream>
#include <iostream>

#include "utils/tic_toc.h"

namespace myslam{
namespace backend{
    Problem::Problem(ProblemType problemType) : problemType_(problemType)
    {
        // Begin with an empty set of vertices scheduled for marginalization.
        verticies_marg_.clear();
    }
    // All members clean up via their own destructors (Rule of Zero).
    Problem::~Problem() = default;
    /// Register a vertex with the problem, keyed by its id.
    /// @return true if it was inserted, false if the id was already present.
    bool Problem::AddVertex(std::shared_ptr<Vertex> vertex)
    {
        // Single-lookup insert: the original did find() followed by insert(),
        // probing the container twice. insert() leaves the map untouched and
        // reports failure when the key already exists.
        return verticies_.insert(std::make_pair(vertex->Id(), vertex)).second;
    }

    /// Register an edge (residual term) with the problem, keyed by its id.
    /// @return true if it was inserted, false if the id was already present.
    bool Problem::AddEdge(shared_ptr<Edge> edge)
    {
        // Single-lookup insert instead of find()+insert() (see AddVertex).
        return edges_.insert(std::make_pair(edge->Id(), edge)).second;
    }
    /// Run Levenberg-Marquardt on the registered edges/vertices for at most
    /// `iterations` outer iterations.
    /// @param iterations  maximum number of accepted LM steps
    /// @return false when there is nothing to optimize, true otherwise
    bool Problem::Solve(int iterations)
    {
       
        if(edges_.size()==0||verticies_.size()==0)
        {
    
            std::cout << "\n Cannot solve problem without edges or verticies" << std::endl;
            return false;
        }
        TicToc t_solve;
        // Count the total dimension of the optimization variables (sizes H).
        SetOrdering();
        // Traverse every edge and build H = J^T*J (and b).
        MakeHessian();
        // LM initialization: pick the initial damping value lambda.
        ComputeLambdaInitLM();
        // LM iteration loop.
        bool stop = false;  // set when a termination criterion fires
        int iter = 0;   // outer-iteration counter
        while(!stop&&(iter<iterations))
        {
            std::cout << "iter: " << iter << " ,chi= " << currentChi_ << ", Lambda: " << currentLambda_ << std::endl;
            bool oneStopSuccess = false;
            int false_cnt = 0;
            // Keep retrying with a larger lambda until one step is accepted.
            while(!oneStopSuccess)
            {
                // Apply the damping term: H <- H + lambda*I.
                AddLambdatoHessianLM();
                // Solve the linear system H x = b.
                SolveLinearSystem();

                // Restore H so a different lambda can be applied next try.
                RemoveLambdaHessianLM();
                // Termination 1: stop when the step is tiny, or after more
                // than 10 consecutive rejected steps.
                // NOTE(review): the message below only mentions the tiny-step
                // case even though this branch also fires on false_cnt>10.
                if(delta_x_.squaredNorm()<= 1e-130 || false_cnt>10)
                {
                    cout << "delta_x_" << delta_x_.squaredNorm() << " 太小了，退出迭代" << std::endl;
                    stop = true;
                    break;
                }
                // Update the variables: x <- x (+) delta_x.
                UpdateStates();
                // Accept/reject the step and update lambda accordingly.
                oneStopSuccess = IsGoodStepInLM();
                if(oneStopSuccess)
                {
                    // Step accepted: re-linearize (rebuild H) at the new point.
                    MakeHessian();
                    false_cnt = 0; 
                }
                else
                {
                    false_cnt++;
                    // Error did not decrease: roll the state back.
                    RollbackStates();
                }
            }
            iter++;
            cout << "iter:" << iter << endl;
            // Termination 2: stop once the error drops below the threshold
            // derived from the initial chi2 in ComputeLambdaInitLM().
            if(sqrt(currentChi_)<=stopThresholdLM_)
            {
                stop = true;
                cout << "和第一次相比，下降了1e-6倍退出" << std::endl;
            }
        }
        std::cout << "problem solve cost : " << t_solve.toc() <<" ms "<< std::endl;
        std::cout << "makeHessian cost : " << t_hessian_cost_ << " ms " << std::endl;
        return true;
    }
    void Problem::RollbackStates()
    {
        for(auto& vertex:verticies_)
        {
            ulong idx = vertex.second->OrderingId();
            ulong dim = vertex.second->LocalDimension();
            VecX delta = delta_x_.segment(idx, dim);
            vertex.second->Plus(-delta);
        }
    }
    /// Decide whether the last LM step should be accepted, and update the
    /// damping factor lambda via Nielsen's strategy.
    /// @return true when the step reduced the error (step accepted)
    bool Problem::IsGoodStepInLM()
    {
        // Predicted error decrease (PPT eq. 11): dx^T (lambda*dx + b).
        double scale = 0;
        scale = delta_x_.transpose() * (currentLambda_ * delta_x_ + b_);
        scale += 1e-3;  // make sure it's non-zeros

        // Total error recomputed at the updated state.
        double tempChi = 0.0;
        for(auto& edge:edges_)
        {
            edge.second->ComputeResidual();
            tempChi += edge.second->Chi2();
        }

        // Gain ratio: actual decrease / predicted decrease.
        double rho = (currentChi_ - tempChi) / scale;
        if(rho > 0 && std::isfinite(tempChi))   // error decreased: last step is good
        {
            // Nielsen update (PPT eq. 13): lambda *= max(1/3, 1 - (2*rho - 1)^3).
            // BUG FIX: the cube must use the gain ratio rho, not currentLambda_.
            double alpha = 1. - std::pow((2 * rho - 1), 3);
            alpha = std::min(alpha, 2. / 3.);   // cap so the factor stays in [1/3, 2/3+...]
            double scaleFactor = std::max(alpha, 1.0 / 3.);
            currentLambda_ *= scaleFactor;
            ni_ = 2;
            currentChi_ = tempChi;
            return true;
        }
        else
        {
            // Step rejected: raise the damping aggressively.
            currentLambda_ *= ni_;
            ni_ *= 2;
            return false;
        }
    }
    void Problem::UpdateStates()
    {
        for(auto& vertex:verticies_)
        {
            ulong idx = vertex.second->OrderingId();
            ulong dim = vertex.second->LocalDimension();
            VecX delta = delta_x_.segment(idx, dim);
            //所有参数x都叠加一个变量
            vertex.second->Plus(delta);
        }
    }
    /// Undo AddLambdatoHessianLM(): subtract lambda from the diagonal of H.
    /// NOTE: cheaper than keeping a copy of H, at the cost of a small
    /// floating-point round-trip error on the diagonal.
    void Problem::RemoveLambdaHessianLM()
    {
        ulong size = Hessian_.rows();
        // Fixed assert message (it previously read "is now square!").
        assert(Hessian_.rows() == Hessian_.cols() && "Hessian is not square!");
        for (ulong i = 0; i < size; ++i)
        {
            Hessian_(i, i) -= currentLambda_;
        }
    }
    /// Solve (H + lambda*I) * delta_x = b for the update step.
    /// Uses Eigen's robust Cholesky (LDLT) factorization instead of forming
    /// the explicit inverse: cheaper and far better conditioned numerically.
    /// H is symmetric and, with the LM damping added, positive definite.
    void Problem::SolveLinearSystem()
    {
        delta_x_ = Hessian_.ldlt().solve(b_);
    }
    /// Apply the LM damping term: add lambda to the diagonal of H,
    /// i.e. H <- H + lambda*I. Reverted by RemoveLambdaHessianLM().
    void Problem::AddLambdatoHessianLM()
    {
        ulong size = Hessian_.rows();
        // Fixed assert message (it previously read "is now square!").
        assert(Hessian_.rows() == Hessian_.cols() && "Hessian is not square!");
        for (ulong i = 0; i < size; ++i)
        {
            Hessian_(i, i) += currentLambda_;
        }
    }
    /// Recount the total local (tangent-space) dimension of all vertices;
    /// this total is the size of the Hessian built in MakeHessian().
    void Problem::SetOrdering()
    {
        // Reset the counters from scratch on every call.
        ordering_poses_ = 0;
        ordering_landmarks_ = 0;
        ordering_generic_ = 0;
        for (auto it = verticies_.begin(); it != verticies_.end(); ++it)
        {
            ordering_generic_ += it->second->LocalDimension();
        }
        // NOTE(review): each vertex's OrderingId() is presumably assigned
        // elsewhere (e.g. on AddVertex) — confirm, since MakeHessian uses it.
    }

    void Problem::ComputeLambdaInitLM()
    {
        ni_ = 2.;
        currentLambda_ = -1;
        currentChi_ = 0.0;
        //robust cost chi2
        for(auto& edge:edges_)
        {
            currentChi_ += edge.second->Chi2();
        }
        if(err_prior_.rows()>0)
            currentChi_ += err_prior_.norm();
        stopThresholdLM_ = 1e-20 * currentChi_;  //迭代结束条件，误差下降1e-6倍
        double maxDiagonal = 0;    //diagonal   对角线的意思
        ulong size = Hessian_.cols();
        assert(Hessian_.cols() == Hessian_.rows() && "Hessian is not square");
        for (ulong i = 0; i < size;++i)
        {
            maxDiagonal = std::max(fabs(Hessian_(i, i)), maxDiagonal);
        }
        // 初始值lambda=t*max{(J^T*J)_ii}   t属于[1e-8,1];
        double tau = 1e-5;
        currentLambda_ = tau * maxDiagonal;
    }

    /// Build the normal equations H * dx = b at the current linearization
    /// point, with H = sum_e J^T W J and b = -sum_e J^T W r, accumulating
    /// per-vertex-pair blocks at each vertex's OrderingId offset.
    /// Also resets delta_x_ to zero and accumulates timing into t_hessian_cost_.
    void Problem::MakeHessian()
    {
        TicToc t_h;
        // The ordering pass determined the total variable dimension.
        ulong size = ordering_generic_;
        MatXX H(MatXX::Zero(size, size));
        VecX b(VecX::Zero(size));

        // Traverse every residual, compute its Jacobians, and scatter-add
        // the J^T*W*J blocks into H.
        for(auto& edge:edges_)
        {
            edge.second->ComputeResidual();
            edge.second->ComputeJacobians();
            // References avoid copying the Jacobian/vertex containers.
            const auto& jacobians = edge.second->Jacobians();
            const auto& verticies = edge.second->Verticies();
            assert(jacobians.size() == verticies.size());
            for (size_t i = 0; i < verticies.size(); ++i)
            {
                const auto& v_i = verticies[i];
                if(v_i->IsFixed())
                {
                    continue;   // fixed vertices contribute nothing
                }
                const auto& jacobian_i = jacobians[i];
                ulong index_i = v_i->OrderingId();
                ulong dim_i = v_i->LocalDimension();
                MatXX JtW = jacobian_i.transpose() * edge.second->Information();
                // Only the upper triangle of vertex pairs (j >= i) is
                // computed; the mirror block is added by transpose below.
                for (size_t j = i; j < verticies.size(); ++j)
                {
                    const auto& v_j = verticies[j];
                    if(v_j->IsFixed())
                    {
                        continue;
                    }
                    const auto& jacobian_j = jacobians[j];
                    ulong index_j = v_j->OrderingId();
                    ulong dim_j = v_j->LocalDimension();
                    assert(v_j->OrderingId() != -1);
                    MatXX hessian = JtW * jacobian_j;
                    H.block(index_i, index_j, dim_i, dim_j).noalias() += hessian;
                    if(j != i)
                    {
                        H.block(index_j, index_i, dim_j, dim_i).noalias() += hessian.transpose();
                    }
                }
                b.segment(index_i, dim_i).noalias() -= JtW * edge.second->Residual();
            }
        }
        Hessian_ = H;
        b_ = b;
        // FIX: removed leftover debug prints that dumped the full H and b to
        // stdout on every call — they flooded the log and dominated runtime
        // for any non-trivial problem size.
        t_hessian_cost_ += t_h.toc();
        delta_x_ = VecX::Zero(size);
    }
}
}