/*
 * This tool builds the original data set.
 * The data set path is given as a command-line
 * parameter to this binary.
 * The images are then transformed into an lmdb
 * database.
 * The path of the database is also given as a
 * command-line parameter.
 *
 * The database created by this file is used as
 * the overall database, which contains all
 * pixels of interest.
 */

#include "tools.h"

#include <csignal>

#include "boost/serialization/serialization.hpp"
#include <boost/serialization/base_object.hpp>
#include <boost/serialization/utility.hpp>
#include <boost/serialization/list.hpp>
#include <boost/archive/text_oarchive.hpp>
#include <boost/archive/text_iarchive.hpp>
#include "boost/algorithm/string.hpp"
#include <boost/format.hpp>
#include "boost/date_time/posix_time/posix_time.hpp"

using google::INFO;
using google::WARNING;
using google::FATAL;
using google::ERROR;
using boost::filesystem::path;
using boost::serialization::serialize;
using caffe::Caffe;
using boost::format;

// Name of the on-disk serialized TrainState used to resume an interrupted run.
static const string STAT_NAME("cur_state.txt");

// Set from the SIGINT handler to request a clean shutdown of the main loop.
// std::sig_atomic_t is the only integer type guaranteed safe to write from a
// signal handler; volatile keeps the main-loop read from being optimized out.
static volatile std::sig_atomic_t stop = 0;

// SIGINT handler: ask the main loop to finish the current phase and exit,
// so the state saved in STAT_NAME stays consistent.
void intHandler(int dummy) {
    stop = 1;
}

// Phases of the active-learning loop, persisted in TrainState::runningPhase.
// Kept as an unscoped enum so the values stay implicitly convertible to int
// for serialization and comparisons against the stored phase.
enum RunningPhase {
    CONTINUE_TRAINING,  // (re)train the net on the most recent database
    PREDICTING,         // run inference to mine hard examples into a new DB
    MAKING_INITDB       // reserved: build the initial balanced database
};

string generateDBname();

/*
 * Snapshot of the training loop's progress, serialized to STAT_NAME so a
 * run can resume where it stopped (see saveState / loadState).
 *
 * NOTE: the field order inside serialize() IS the on-disk archive layout;
 * do not reorder it, or previously saved state files will no longer load.
 */
class TrainState {
    friend class boost::serialization::access;

    template<class Archive>
    void serialize(Archive &ar, const unsigned int version) {
        // Order defines the archive layout; keep it stable.
        ar &
        trainNet &
        lastSnapshot &
        lastDBName &
        solverPath &
        testString &
        initialDBName &
        runningPhase &
        initTrain &
        hasInitDB;
    }

public:
    bool hasInitDB;        // true once the initial balanced DB has been built
    string testString;
    string initialDBName;  // name of the very first generated database
    string lastDBName;     // database the next training phase should use
    string solverPath;     // solver prototxt template (boost::format string)
    string lastSnapshot;   // prefix of the latest caffe snapshot ("" = none)
    string trainNet;       // net prototxt template (boost::format string)
    bool initTrain;        // first round trains longer with a higher LR
    int runningPhase;      // one of the RunningPhase enumerators

    // Mem-initializers are listed in declaration order (avoids -Wreorder and
    // makes it obvious that lastDBName is initialized from initialDBName,
    // which is declared -- and therefore initialized -- before it).
    TrainState() :
            hasInitDB(false),
            testString("abc"),
            initialDBName(generateDBname()),
            lastDBName(initialDBName),
            solverPath("/home/d/workspace/DeepDR/data/lenet_solver.prototxt"),
            lastSnapshot(""),
            trainNet("/home/d/workspace/DeepDR/data/lenet_train_test.prototxt"),
            initTrain(true),
            runningPhase(CONTINUE_TRAINING) {}
};

/*
 * The file list should be a table in CSV format;
 * any blank space will be trimmed.
 */
// Command-line flag: LMDB path of the full (unbalanced) training set.
DEFINE_string(TrainingDataSet,
              "overAllDataSet",
              "all avail data set. Not balanced");
// Command-line flag: LMDB path holding only minority-class samples.
DEFINE_string(minorityDB,
              "minority",
              "the minority samples classes");

/*
 * Serialize the training state to STAT_NAME so an interrupted run can be
 * resumed by loadState().  Aborts (CHECK) if the file cannot be opened,
 * because silently losing the checkpoint would desynchronize the loop's
 * on-disk databases from its recorded phase.
 */
void saveState(TrainState *solverState) {
    std::ofstream fout(STAT_NAME);
    CHECK(fout.is_open()) << "cannot open " << STAT_NAME << " for writing";
    boost::archive::text_oarchive oa(fout);
    oa << (*solverState);
    // No explicit close: oa must be destroyed (flushing any trailing archive
    // data) before fout; scope exit guarantees that destruction order.
}

/*
 * Restore previously saved training state from STAT_NAME.
 * Leaves *solverState untouched when no state file exists yet (first run).
 */
void loadState(TrainState *solverState) {
    const path stateFile(STAT_NAME);
    if (!boost::filesystem::exists(stateFile)) {
        return;  // nothing to resume from
    }
    std::ifstream fin(STAT_NAME);
    boost::archive::text_iarchive ia(fin);
    ia >> *solverState;
    fin.close();
}

/*
 * Binary-search for the smallest index i in [1, 2^25) such that no
 * directory named "dataSet_<i>" (index zero-padded to 10 digits) exists,
 * and return that name.
 *
 * NOTE(review): correctness relies on existing databases occupying a
 * contiguous prefix 1..k; with gaps the search may not return the lowest
 * free index -- confirm that is acceptable for this tool.
 */
string generateDBname() {
    int lo = 1;
    int hi = 1 << 25;
    while (lo < hi) {
        const int mid = (lo + hi) / 2;
        const string candidate = string("dataSet_") + caffe::format_int(mid, 10);
        if (exists(path(candidate))) {
            lo = mid + 1;  // taken: first free index must be above mid
        } else {
            hi = mid;      // free: first free index is mid or below
        }
    }
    return string("dataSet_") + caffe::format_int(lo, 10);
}

/*
 * Entry point.  Runs an active-learning loop alternating between two
 * persisted phases (see TrainState::runningPhase):
 *   CONTINUE_TRAINING - fine-tune the net on the most recent database;
 *   PREDICTING        - run the trained net over random samples of the full
 *                       set, collecting misclassified negatives (mixed with
 *                       minority-class samples) into a new database.
 * State is saved to STAT_NAME after each phase so a SIGINT-interrupted run
 * can resume where it left off.
 */
int main(int argc, char **argv) {
    srand((unsigned int) time(NULL));
    // Mirror all glog output to stderr (with color) in addition to log files.
    google::SetCommandLineOption("alsologtostderr", "true");
    google::SetCommandLineOption("colorlogtostderr", "true");
    google::InitGoogleLogging(argv[0]);
    google::SetUsageMessage(
            "Convert the original image set to lmdb database\n"
    );
    // SIGINT only sets `stop`; the main loop exits after finishing the
    // current phase, so the saved state stays consistent.
    signal(SIGINT, intHandler);

    google::ParseCommandLineFlags(&argc, &argv, true);
    // backward-cpp: install handlers that print a stack trace on crashes.
    backward::SignalHandling sh;
    if (!sh.loaded()) {
        LOG(ERROR) << "BACKWARD NOT LOADED";
    }
    string trainingDB = FLAGS_TrainingDataSet;
    string minorityDB = FLAGS_minorityDB;

    // Resume from a previous run if STAT_NAME exists; otherwise keep the
    // freshly constructed defaults (which already picked a new DB name).
    TrainState solverState;
    loadState(&solverState);

    LMDBDataSet originalDataSet;
    LMDBDataSet minorityDataSet;
    originalDataSet.loadFromLMDB(trainingDB);
    minorityDataSet.loadFromLMDB(minorityDB);
    LOG(INFO) << "There are " << originalDataSet.length() << " samples";
    LOG(INFO) << "There are " << minorityDataSet.length() << " minority samples";

    // make initial data set by random sample;
    // TODO CHECK THIS CODE
    // Build the initial balanced DB: for every minority sample drawn, also
    // draw one random sample from the overall set (1:1 mix), writing both at
    // shuffled positions supplied by the Permutation.
    if (!solverState.hasInitDB) {
        LOG(INFO) << "Make Initial DataSet";
        LMDBDataSet initialDB;
        // A resumed state must still point at the DB name chosen when the
        // state object was first created.
        CHECK_EQ(solverState.initialDBName, solverState.lastDBName);
        initialDB.newDB(solverState.initialDBName);
        // 2 * length slots: the loop below adds two datums per iteration.
        Permutation initialPM((int) minorityDataSet.length() * 2);
        caffe::Datum datum;
        for (int i = 0; i < minorityDataSet.length(); i++) {
            originalDataSet.seek((int) (rand() % originalDataSet.length()));
            originalDataSet.getDatum(datum);
            initialDB.addDatum(initialPM.next(), &datum);
            minorityDataSet.seek((int) (rand() % minorityDataSet.length()));
            minorityDataSet.getDatum(datum);
            initialDB.addDatum(initialPM.next(), &datum);
        }
        solverState.hasInitDB = true;
        saveState(&solverState);
    }

    // TODO Next run the training code
    std::ofstream predictLog("predictLog.txt", std::ios::app);
    while (!stop) {
        {
            // Timestamp each loop iteration in the prediction log.
            boost::posix_time::ptime now = boost::posix_time::second_clock::local_time();
            predictLog << boost::posix_time::to_iso_extended_string(now) << std::endl;
        }
        // generate files;
        if (solverState.runningPhase == CONTINUE_TRAINING) {
            string netFile = "net.prototxt";
            string solverFile = "solver.prototxt";
            {
                // The net prototxt is a boost::format template whose single
                // placeholder is filled with the current database name.
                std::ifstream ifs(solverState.trainNet);
                CHECK(exists(path(solverState.trainNet)));
                std::string content((std::istreambuf_iterator<char>(ifs)),
                                    (std::istreambuf_iterator<char>()));
                std::ofstream ofs(netFile);
                ofs << boost::format(content) % solverState.lastDBName;
                ofs.close();
            }
            {
                // The solver prototxt template takes the generated net file
                // and a learning rate (higher for the very first round).
                std::ifstream ifs(solverState.solverPath);
                CHECK(exists(path(solverState.solverPath)));
                std::string content((std::istreambuf_iterator<char>(ifs)),
                                    (std::istreambuf_iterator<char>()));
                std::ofstream ofs(solverFile);
                ofs << boost::format(content) % netFile % (solverState.initTrain ? 0.0003 : 0.0001);
                ofs.close();
            }
            // train on last database
            CHECK(boost::filesystem::exists(path(solverState.lastDBName)));
            caffe::SolverParameter solverParameter;
            caffe::ReadSolverParamsFromTextFileOrDie(
                    solverFile, &solverParameter);
            solverParameter.set_device_id(0);
            solverParameter.set_solver_mode(caffe::SolverParameter_SolverMode_GPU);
            Caffe::SetDevice(0);
            Caffe::set_mode(Caffe::GPU);
//            Caffe::set_solver_count(1);
            // NOTE(review): `solver` is only freed on the normal path below;
            // a throwing call in between leaks it -- consider a unique_ptr.
            caffe::Solver<float> *solver = caffe::SolverRegistry<float>::CreateSolver(solverParameter);
            // Resume from the previous snapshot if one was recorded.
            if (solverState.lastSnapshot.size()) {
                solver->Restore((solverState.lastSnapshot + ".solverstate").c_str());
            }
            LOG(INFO) << "Start solve, start iter " << solver->iter();
            // Longer first round (8000 iters), shorter refinement rounds.
            solver->Step(solverState.initTrain ? 8000 : 3000);
            LOG(INFO) << "End solve, end iter " << solver->iter();
            solver->Snapshot();
            // Reconstruct the snapshot file prefix Caffe just wrote so the
            // PREDICTING phase (and the next resume) can find it.
            string filename = solverParameter.snapshot_prefix() +
                              "_iter_" +
                              caffe::format_int(solver->iter());
            LOG(INFO) << "snapshot saved to " << filename;
            solverState.lastSnapshot = filename;
            solverState.runningPhase = PREDICTING;
            solverState.initTrain = false;
            saveState(&solverState);
            delete solver;
        }
        // TODO Next perform prediction randomly on original data set to reduce time.
        // TODO generate new database.
        if (solverState.runningPhase == PREDICTING) {
            const int bachsize = 1024;
            // Stop mining once this many hard negatives have been collected.
            const int ErrThreshHold = (const int) minorityDataSet.length() / 6;
            string DBName = generateDBname();
            LMDBDataSet newTrainingSet;
            newTrainingSet.newDB(DBName);

            // Deploy net loaded with the weights snapshotted at the end of
            // the last CONTINUE_TRAINING phase.
            string netPath = "/home/d/workspace/DeepDR/data/lenet.prototxt";
            Caffe::set_mode(Caffe::GPU);
            caffe::Net<float> net(netPath, caffe::TEST);
            net.CopyTrainedLayersFrom(solverState.lastSnapshot + ".caffemodel");
            caffe::Blob<float> *input_layer = net.input_blobs()[0];
            caffe::Blob<float> *output_layer = net.output_blobs()[0];
            input_layer->Reshape(bachsize, 3,
                                 Parameters::WINDOW_SIZE,
                                 Parameters::WINDOW_SIZE);
            net.Reshape();
            LOG(INFO) << "Predicting";
            int Error = 0;           // hard negatives collected so far
            int PositiveSample = 0;  // positive-label samples seen
            int NegSample = 0;       // negative-label samples seen
            Permutation minorityPerm((int) minorityDataSet.length());
            Permutation overallPerm((int) originalDataSet.length());
            Statics statics;
            while (Error < ErrThreshHold) {
                // wrap input data
                PerformanceMeasure performanceMeasure;
                performanceMeasure.begin();
                int width = input_layer->width();
                int height = input_layer->height();
                float *input_data = input_layer->mutable_cpu_data();
                int imagesize = width * height * input_layer->channels();
                LOG(INFO) << imagesize;

                vector<Sample> vecSample;
                for (int batch = 0; batch < bachsize; batch++) {
                    // Wrap this batch slot's channel planes in cv::Mat
                    // headers that point directly into the input blob.
                    std::vector<cv::Mat> input_channels;
                    for (int i = 0; i < input_layer->channels(); ++i) {
                        cv::Mat channel(height, width, CV_32FC1, input_data);
                        input_channels.push_back(channel);
                        input_data += width * height;
                    }

                    // preprocess
                    // NOTE(review): overallPerm is also advanced when hard
                    // samples are stored below; verify the Permutation
                    // cannot be exhausted mid-run.
                    Sample sampleFromDataSet = originalDataSet[overallPerm.next()];
                    sampleFromDataSet.index = overallPerm.current();
                    if (sampleFromDataSet.label) {
                        PositiveSample++;
                    }
                    vecSample.push_back(sampleFromDataSet);

                    // Scale 8-bit pixel data to [0,1] floats before splitting.
                    cv::Mat sample_normalized;
                    sampleFromDataSet.data.convertTo(sample_normalized, CV_32FC3, 1.0 / 255.0);
                    /* This operation will write the separate BGR planes directly to the
                     * input layer of the network because it is wrapped by the cv::Mat
                     * objects in input_channels. */
                    cv::split(sample_normalized, input_channels);

                    CHECK(reinterpret_cast< float * >(input_channels.at(0).data)
                          == net.input_blobs()[0]->cpu_data() + batch * imagesize)
                    << "Input channels are not wrapping the input layer of the network." << batch;
                }

                net.Forward();

                // Score the batch: argmax over the 10 output channels.
                for (int batch = 0; batch < bachsize; batch++) {
                    const float *begin = output_layer->cpu_data() + batch * output_layer->channels();
                    const float *end = begin + output_layer->channels();
                    std::vector<float> vec(begin, end);
                    CHECK_EQ(vec.size(), 10);
                    // write to log file, saving all data
                    predictLog << std::setw(10) << vecSample[batch].index << " " << vecSample[batch].label << " : ";
                    int maxIdex = 0;
                    float maxVal = 0;
                    for (int ii = 0; ii < vec.size(); ii++) {
                        predictLog << std::fixed << vec[ii] << " ";
                        if (vec[ii] > maxVal) {
                            maxVal = vec[ii];
                            maxIdex = ii;
                        }
                    }
                    // Confusion-matrix bookkeeping: label/prediction 0 is the
                    // negative class; everything else counts as positive.
                    if (maxIdex == vecSample[batch].label) {
                        if (maxIdex == 0)
                            statics.addTN();
                        else
                            statics.addTP();
                    } else {
                        if (maxIdex == 0)
                            statics.addFN();
                        else
                            statics.addFP();
                    }
                    if (vecSample[batch].label == 0) {
                        NegSample++;
                    }
                    // A misclassified negative (false positive) is a hard
                    // sample: store it plus four minority samples and one
                    // extra random sample to build the next training DB.
                    if (vecSample[batch].label != maxIdex) {
                        if (vecSample[batch].label == 0) {
                            // add sample
                            newTrainingSet.addSample(vecSample[batch]);
                            newTrainingSet.addSample(minorityDataSet[minorityPerm.next()]);
                            newTrainingSet.addSample(originalDataSet[overallPerm.next()]);
                            newTrainingSet.addSample(minorityDataSet[minorityPerm.next()]);
//                            newTrainingSet.addSample(originalDataSet[overallPerm.next()]);
                            newTrainingSet.addSample(minorityDataSet[minorityPerm.next()]);
                            newTrainingSet.addSample(minorityDataSet[minorityPerm.next()]);
                            Error++;
                        }
                    }
                    predictLog << std::endl;
                }
                performanceMeasure.end();
                LOG(INFO) <<
                performanceMeasure.getPeriod() <<
                "  Error:" << Error << "/" <<
                PositiveSample << "/" << NegSample;
                LOG(INFO) << statics.toString();
            }
            // The next round trains on the freshly mined database.
            solverState.runningPhase = CONTINUE_TRAINING;
            solverState.lastDBName = DBName;
            saveState(&solverState);
        }
    }

}
