#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/DoglegOptimizer.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>

#include <cmath>
#include <cstdlib>
#include <ctime>
#include <fstream>
#include <iostream>
#include <random>
#include <vector>

using namespace gtsam;
using namespace std;
using symbol_shorthand::X;

// Unary factor for one observed sample (x, y) of the curve y = exp(a*x + b).
// The unknown Vector2 q packs the parameters [a, b].  The residual is padded
// to 2 dimensions (second component fixed at 0) so it matches the
// 2-dimensional noise model the caller supplies.
class CurveFitFactor : public gtsam::NoiseModelFactor1<Vector2>
{
public:
    /// @param j     key of the Vector2 parameter variable [a, b]
    /// @param x     sample abscissa
    /// @param y     observed (noisy) ordinate
    /// @param model shared 2-dimensional noise model
    CurveFitFactor(Key j, double x, double y, const SharedNoiseModel &model)
        : NoiseModelFactor1<Vector2>(model, j), _x(x), _y(y) {}

    ~CurveFitFactor() override {}

    /// Residual r = [y - exp(a*x + b), 0].  When requested, H receives the
    /// 2x2 Jacobian dr/dq = [[-x*e, -e], [0, 0]] with e = exp(a*x + b).
    Vector evaluateError(const Vector2& q, boost::optional<Matrix&> H = boost::none) const override
    {
        const double e = exp(q[0] * _x + q[1]);  // evaluate the model once
        if (H) (*H) = (Matrix(2, 2) << -_x * e, -e, 0, 0).finished();
        return (Vector(2) << _y - e, 0).finished();
    }

    /// Deep copy, required by gtsam's factor-graph cloning machinery.
    gtsam::NonlinearFactor::shared_ptr clone() const override
    {
        // The derived shared_ptr converts implicitly; no static_pointer_cast needed.
        return gtsam::NonlinearFactor::shared_ptr(new CurveFitFactor(*this));
    }

private:
    double _x, _y;  // the single observed data point this factor constrains
};

// Generates n noisy samples of y = exp(0.3*x + 0.1) at x = 0.1*i + 0.23.
// Noise is drawn uniformly from {0, 1, ..., 9} * 0.00234 — the same levels as
// the original rand() % 10 scheme, but via <random> (unbiased, no global
// srand/rand state) and without the leftover debug printing to stdout.
// @param xData output abscissae (cleared first)
// @param yData output noisy ordinates (cleared first)
// @param n     number of samples to generate (default 100, as before)
// @return 0 always (kept for interface compatibility)
int generateData(std::vector<double>& xData, std::vector<double>& yData, int n = 100)
{
    xData.clear();
    yData.clear();
    xData.reserve(n);
    yData.reserve(n);

    std::mt19937 gen{std::random_device{}()};
    std::uniform_int_distribution<int> level(0, 9);

    for (int i = 0; i < n; ++i) {
        const double x = 0.1 * i + 0.23;
        const double noise = level(gen) * 0.00234;
        xData.push_back(x);
        yData.push_back(std::exp(0.3 * x + 0.1) + noise);
    }
    return 0;
}

// Fits the parameters [a, b] of y = exp(a*x + b) to noisy synthetic data by
// nonlinear least squares over a single-variable factor graph (one
// CurveFitFactor per sample, all attached to key X(0)).
int main(int argc, char* argv[])
{
    std::vector<double> xData, yData;
    generateData(xData, yData);

    // Stack-allocated graph: the original heap-allocated one was never freed.
    NonlinearFactorGraph graph;

    // Initial guess for [a, b]; the generator's ground truth is [0.3, 0.1].
    Values initial_values;
    Vector2 param;
    param << 0.2, 0.2;
    initial_values.insert(X(0), param);

    // Isotropic 2-D noise model, matching the factor's padded 2-D residual.
    noiseModel::Diagonal::shared_ptr noise_mode = noiseModel::Isotropic::Sigma(2, 1.5);

    // One factor per sample; bound follows the data instead of hard-coding 100.
    for (size_t i = 0; i < xData.size(); ++i) {
        graph.add(CurveFitFactor(X(0), xData[i], yData[i], noise_mode));
    }

//    LevenbergMarquardtOptimizer optimizer(graph, initial_values);
    DoglegOptimizer optimizer(graph, initial_values);
    Values result = optimizer.optimize();
    std::cout << "result: \n" << result.at<Vector2>(X(0)) << std::endl;
    return 0;
}
