#include "classifier.h"

// Default constructor. The classifier models (SVM, mlp, boost) and the
// PCA basis (eigenVector) are default-initialised by their own constructors;
// nothing to do here.
Classifier::Classifier()
{
}

// Destructor: release any trained model state held by the three
// classifiers before the members themselves are destroyed.
Classifier::~Classifier()
{
    boost.clear();
    mlp.clear();
    SVM.clear();
}

/*
 *  Run PCA over the feature CSV in @fileName and dump the projected
 *  (compressed) features plus the original germination flag to
 *  ./output/pca_<n>.csv, where <n> is the number of components kept.
 *  Does nothing when the input file cannot be read.
 */
void Classifier::pcaCompress(const QString &fileName)
{
    Mat inputMat, flagMat, compress;
    QString pcaFile = "./output/pca_";

    // Get input mat and flag mat
    getInputMat(fileName, inputMat, flagMat);
    // Bail out on an unreadable/empty input file — consistent with
    // trainSVM/trainMLP/trainBoost, and doPCA would crash on an empty mat.
    if(!inputMat.data) return;

    int maxComp = inputMat.cols;
    // Perform actual PCA operation (also stores the basis in eigenVector)
    compress = doPCA(inputMat, maxComp);

    // Indicate output file name with number of principal components
    pcaFile = pcaFile.append(QString::number(maxComp) + ".csv");

    // ofstream does not throw by default, so check the stream state
    // explicitly instead of relying on a try/catch that never fires.
    ofstream fout;
    fout.open(pcaFile.toStdString().c_str(), ofstream::out);
    if(!fout.is_open())
    {
        qDebug() << "pcaCompress: cannot open output file" << pcaFile;
        return;
    }

    // One CSV line per sample: projected features, then the flag
    for(int i = 0; i < inputMat.rows; i++)
    {
        for(int j = 0; j < maxComp; j++)
        {
            fout << compress.at<float>(i, j) << ",";
        }
        fout << flagMat.at<float>(i, 0) << "\n";
    }
    fout.close();

    qDebug() << "PCA has completed";
}

/*
 *  Train an SVM on the CSV data in @fileName (features..., flag),
 *  evaluate it on the held-out 20% split, write per-sample predictions
 *  to ./output/svmtest.csv and save the model as ./output/SVM.xml.
 */
void Classifier::trainSVM(const QString &fileName)
{
    Mat inputMat, flagMat;
    Mat trainingMat, testMat, new_response;

    // Get input mat and flag mat
    getInputMat(fileName, inputMat, flagMat);
    if(!inputMat.data) return;
    getTrainingTestMat(inputMat, trainingMat, testMat);

    int nrows = trainingMat.rows, trows = testMat.rows;

    // Responses for the training split are the first nrows flags
    new_response = flagMat.rowRange(0, nrows).clone();

    // Default parameters; train_auto cross-validates the hyperparameters
    CvSVMParams params;

    //////////////////////////////////////////////////////////////////////
    qDebug() << "\nstart SVM training..." << fileName << "\n";
    SVM = CvSVM();
    // Train SVM with 20-fold cross validation
    SVM.train_auto(trainingMat, new_response, Mat(), Mat(), params, 20);
    qDebug() << "done SVM training...\n";
    /////////////////////////////////////////////////////////////////////

    // SVM predict response
    float res, tar;
    int mis = 0, acc = 0;

    // ofstream does not throw by default; check the stream explicitly
    ofstream fout;
    fout.open("./output/svmtest.csv", ofstream::out);
    if(!fout.is_open())
    {
        qDebug() << "trainSVM: cannot open ./output/svmtest.csv";
        return;
    }

    for(int i = 0; i < trows; i++)
    {
        Mat n_row = testMat.row(i);
        // SVM prediction; the negative class may be labelled -1 or 0
        res = SVM.predict(n_row);
        tar = flagMat.at<float>(i+nrows, 0);

        if(tar == res || (tar == -1 && res == 0)) acc++;
        else mis++;

        // Output to file
        fout << "predict," << res << ",target," << tar << "\n";
    }

    fout << "SVM type : " << params.svm_type << " SVM kernel type : " <<
            params.kernel_type << endl;
    // Spacing made consistent with the MLP report line
    fout << "accurate : " << acc << " mis : " << mis << " total : " << trows;
    SVM.save("./output/SVM.xml");
    qDebug() << "done prediction...SVM file has been saved as SVM.xml";
    fout.close();
}

void Classifier::trainMLP(const QString &fileName)
{
    Mat inputMat, flagMat;
    Mat trainingMat, testMat;
    Mat new_response, mlp_response;

    int rows, nrows, cols, class_count=2;

    // Get input mat and flag mat
    getInputMat(fileName, inputMat, flagMat);
    if(!inputMat.data) return;
    getTrainingTestMat(inputMat, trainingMat, testMat);

    rows = inputMat.rows;
    nrows = trainingMat.rows;
    cols = trainingMat.cols;

    new_response = Mat(nrows, class_count, CV_32F);

    for(int i=0; i < nrows; i++)
    {
        float flag = flagMat.at<float>(i, 0);
        new_response.at<float>(i, 0) = (flag==0 || flag==-1)? 1 : 0;
        new_response.at<float>(i, 1) = (flag == 1) ? 1 : 0;
    }

    //////////////////////////////////////////////////////////////////////////
    qDebug() << "\nstart MLP training...."<< fileName << "\n";
    int layer_sz[] = {cols, 100, 100, class_count};
    Mat layer_sizes(1, (int)(sizeof(layer_sz)/sizeof(layer_sz[0])), CV_32S, layer_sz );

    mlp.create(layer_sizes);
    mlp.train(trainingMat, new_response, Mat(), Mat(),
               CvANN_MLP_TrainParams(cvTermCriteria(CV_TERMCRIT_ITER, 500, 0.01),
                                     //CvANN_MLP_TrainParams::RPROP, 0.05));
                                     CvANN_MLP_TrainParams::BACKPROP, 0.1, 0.1));
    qDebug() << "done MLP training.....\n";
    /////////////////////////////////////////////////////////////////////////

    mlp_response = Mat(1, class_count, CV_32F);
    int acc = 0, mis = 0;
    int trows = rows-nrows;
    try
    {
        ofstream fout;
        fout.open("./output/mlptest.csv", ofstream::out);
        for(int k = 0; k < trows; k++)
        {
            int max;
            Mat n_row = testMat.row(k);
            mlp.predict(n_row, mlp_response);
            minMaxIdx(mlp_response, 0, 0, 0, &max, Mat());
            //minMaxLoc(mlp_response, 0, 0, 0, &max, Mat());

            int tar = (int)flagMat.at<float>(k+nrows, 0);

            if(tar == max || (tar == -1 && max == 0))
            {
                fout << "predict, " << max << ",target, " << tar << ",acc\n";
                acc++;
            }
            else
            {
                fout << "predict, " << max << ",target, " << tar << ",mis\n";
                mis++;
            }


        }
        fout << "accurate : " << acc << " mis : " << mis << " total : " << trows;
        mlp.save("./output/MLP.xml");
        qDebug() << "done prediction...MLP file has been as MLP.xml";
        fout.close();
    }
    catch(exception& e)
    {
        qDebug() << e.what();
    }
}

/*
 *  Train a discrete AdaBoost classifier (100 weak learners, depth 5) on
 *  the CSV data in @fileName, evaluate on the held-out 20% split, write
 *  per-sample predictions to ./output/boosttest.csv and save the model
 *  as ./output/boost.xml.
 */
void Classifier::trainBoost(const QString &fileName)
{
    Mat inputMat, flagMat;
    Mat trainingMat, testMat;
    Mat new_response, var_type;

    int rows, nrows, cols;

    // Get input mat and flag mat
    getInputMat(fileName, inputMat, flagMat);
    if(!inputMat.data) return;
    getTrainingTestMat(inputMat, trainingMat, testMat);

    rows = inputMat.rows;
    nrows = trainingMat.rows;
    cols = trainingMat.cols;

    new_response = Mat(nrows, 1, CV_32F);
    var_type = Mat(cols+1, 1, CV_8U);

    // All feature columns are ordered; the response slot is categorical
    CvMat vtype = var_type;
    cvSet(&vtype, cvScalarAll(CV_VAR_ORDERED) );
    cvSetReal1D(&vtype, cols, CV_VAR_CATEGORICAL );

    for(int i = 0; i < nrows; i++)
    {
        new_response.at<float>(i, 0) = flagMat.at<float>(i, 0);
    }

    ////////////////////////////////////////////////////////////////
    qDebug() << "\nstart boost training...."<< fileName << "\n";
    boost.train(trainingMat, CV_ROW_SAMPLE, new_response, Mat(), Mat(), var_type, Mat(),
        CvBoostParams(CvBoost::DISCRETE, 100, 0.95, 5, false, 0 ));
    qDebug() << "done boost training....\n";
    ////////////////////////////////////////////////////////////////

    int acc = 0, mis = 0;
    int trows = rows-nrows;
    float res, tar;

    // ofstream does not throw by default; check the stream explicitly
    // (the old empty catch block silently swallowed everything anyway)
    ofstream fout;
    fout.open("./output/boosttest.csv", ofstream::out);
    if(!fout.is_open())
    {
        qDebug() << "trainBoost: cannot open ./output/boosttest.csv";
        return;
    }

    for(int i = 0; i < trows; i++)
    {
        Mat n_row = testMat.row(i);
        // No features are missing. The old code passed an UNINITIALISED
        // CV_8U buffer (sized to the training mat, not the sample) as the
        // missing-value mask, so random bytes marked random features missing.
        res = boost.predict(n_row);
        tar = flagMat.at<float>(i+nrows, 0);

        if(res == tar || (res == 0 && tar == -1)) acc++;
        else mis++;
        // Output to file
        fout << "predict," << res << ",target," << tar << "\n";
    }
    fout << "accurate : " << acc << " mis : " << mis << " total : " << trows;
    boost.save("./output/boost.xml");
    qDebug() << "done prediction...Boost file has been saved as boost.xml";
    fout.close();
}

bool Classifier::loadSVM(const QString &fileName)
{
    if(fileName.length() > 0)
    {
        SVM.load(fileName.toStdString().c_str());
        if(SVM.get_support_vector_count()) return true;
    }

    return false;
}

bool Classifier::loadMLP(const QString &fileName)
{
    // CvANN_MLP mlp;
    if(fileName.length() > 0)
    {
        mlp.load(fileName.toStdString().c_str());
        if(mlp.get_layer_count()) return true;
    }

    return false;
}
bool Classifier::loadBoost(const QString &fileName)
{
    if(fileName.length() > 0)
    {
        boost.load(fileName.toStdString().c_str());
        if(boost.get_weak_predictors()->total > 0)
            return true;
    }
    return false;
}

/*
 *  Perform prediction for row mat @n_row with selected
 *  classifier.
 *
 *  @c_type = 0 [use SVM to classify]
 *  @c_type = 1 [use MLP to classify]
 *  @c_type = 2 [use Boost to classify]
 *
 */
void Classifier::classify(double **dPts, QList<SeedNode *> &regions, int c_type, int dim)
{
    // classification result
    float res = 0;    
    // Number of seeds
    int rows = regions.size();

    Mat seeds(rows, dim, CV_32F);
    // Convert double array to Mat
    getDataMat(dPts, seeds, dim);

    Mat compress;
    if(eigenVector.data)
    {
        // PCA compressed row mat
        compress = Mat(rows, eigenVector.rows, CV_32F);
        gemm(seeds, eigenVector, 1.0, Mat(), 0.0, compress, CV_GEMM_B_T);
    }
    else return;

    if(c_type == 0 && SVM.get_support_vector_count() > 0)
    {
        for(int i = 0; i < rows; i++)
        {
            Mat n_row = compress.row(i);

            res = SVM.predict(n_row);
            regions.at(i)->val = (res == 1) ? 1 : 0;
        }
    }
    else if(c_type == 1 && mlp.get_layer_count() > 0)
    {
        for(int i = 0; i < rows; i++)
        {
            Mat mlp_response(1, 2, CV_32FC1);
            Mat n_row = compress.row(i);
            int max = 0;
            mlp.predict(n_row, mlp_response);
            minMaxIdx(mlp_response, 0, 0, 0, &max, Mat());
            res = (float) max;

            regions.at(i)->val = (res == 1) ? 1 : 0;
        }
    }
    else if(c_type == 2 && boost.get_weak_predictors()->total > 0)
    {
        qDebug() << " missing ";
        const CvMat *missing = boost.get_data()->train_data;
        Mat missingMat;
        qDebug() << " missing again";
        if(missing)
        {
            qDebug() << " missing one more time";
            missingMat = Mat(missing->height, missing->width, CV_8U);
        }
        qDebug() << " missing last time";
        for(int i = 0; i < rows; i++)
        {
            Mat n_row = compress.row(i);

            res = boost.predict(n_row, missingMat);
            regions.at(i)->val = (res == 1) ? 1 : 0;
        }
    }
}

/*
 *  Parse a CSV file: each line holds feature values followed by the
 *  germination flag in the last column. Fills @inputMat (rows x features,
 *  CV_32FC1) and @flagMat (rows x 1, CV_32FC1). Both are left untouched
 *  (empty) when the file cannot be opened or contains no data rows, so
 *  callers can test !inputMat.data.
 */
void Classifier::getInputMat(const QString &fileName, Mat &inputMat, Mat &flagMat)
{
    // Open input file
    QFile file(fileName);
    if(!file.open(QIODevice::ReadOnly)) return;

    QTextStream in(&file);
    QStringList list;
    // Feature values, one 1xN row mat per CSV line
    vector<Mat> inputVect;
    // Flag value of germination (last column of each line)
    vector<int> flagVect;
    int cols = 0;   // initialised: stays 0 for an empty file

    while(!in.atEnd())
    {
        list = in.readLine().simplified().split(",");
        // Skip blank/degenerate lines (e.g. a trailing newline) —
        // a valid line needs at least one feature plus the flag
        if(list.size() < 2) continue;

        // Columns of data (flag excluded)
        cols = list.size() - 1;
        Mat rowMat(1, cols, CV_32FC1);

        for(int j = 0; j < cols; j++)
        {
            rowMat.at<float>(0, j) = list.at(j).toFloat();
        }

        inputVect.push_back(rowMat);
        // Last column is the flag value
        flagVect.push_back(list.at(cols).toInt());
    }
    file.close();

    int rows = (int)inputVect.size();
    // Empty file: the old code read `cols` uninitialised at this point
    if(rows == 0) return;

    inputMat = Mat(rows, cols, CV_32FC1);
    flagMat = Mat(rows, 1, CV_32FC1);

    for(int i = 0; i < rows; i++)
    {
        // Copy each parsed row into the output mat
        inputVect.at(i).copyTo(inputMat.row(i));
        // Get each flag
        flagMat.at<float>(i, 0) = (float)flagVect.at(i);
    }
}

/*
 *  Split @inputMat into an 80% training mat and a 20% test mat
 *  (row-wise, in file order). Outputs are deep copies, so they stay
 *  valid independently of @inputMat.
 */
void Classifier::getTrainingTestMat(Mat &inputMat, Mat &trainingMat, Mat &testMat)
{
    int rows = inputMat.rows;

    // 80/20 split with truncation. The old `(int)rows*0.8` cast applied
    // to `rows` (already int) and relied on implicit double->int
    // conversion; this spells the intent explicitly.
    int nrows = (int)(rows * 0.8);

    // rowRange + clone replaces the manual row-copy loops
    trainingMat = inputMat.rowRange(0, nrows).clone();
    testMat = inputMat.rowRange(nrows, rows).clone();
}

/*
 *  Copy the raw double array @dPts (seeds.rows x dim) into the float
 *  mat @seeds, then mean-centre every column.
 *
 *  NOTE(review): the centring uses this batch's own column means, not
 *  the mean of the data the PCA basis (eigenVector) was built from —
 *  confirm this matches how doPCA's training data was centred.
 */
void Classifier::getDataMat(double **dPts, Mat &seeds, int dim)
{
    const int total = seeds.rows;

    // Convert the double array to float, element by element
    for(int r = 0; r < total; r++)
    {
        for(int c = 0; c < dim; c++)
        {
            seeds.at<float>(r, c) = (float)dPts[r][c];
        }
    }

    // Subtract each column's mean from every element of that column
    for(int c = 0; c < dim; c++)
    {
        Scalar colMean = mean(seeds.col(c));
        for(int r = 0; r < total; r++)
        {
            seeds.at<float>(r, c) -= (float)colMean(0);
        }
    }
}

/*
 *  Perform the actual PCA operation on @inputMat (one sample per row),
 *  keeping @maxComp principal components.
 *  Side effect: stores the eigenvector basis in this->eigenVector for
 *  later projection in classify().
 *  Returns the projected (compressed) data matrix, inputMat.rows x maxComp.
 *  (The old header comment claimed it returned the component count.)
 */
Mat Classifier::doPCA(Mat &inputMat, int maxComp)
{
    // Perform PCA operation
    PCA pca(inputMat, Mat(), CV_PCA_DATA_AS_ROW, maxComp);
    // copyTo allocates the destination itself; the previous explicit
    // pre-allocation was redundant
    pca.eigenvectors.copyTo(eigenVector);

    // Project the (mean-subtracted) input onto the retained components;
    // project() sizes the output mat itself
    Mat compress;
    pca.project(inputMat, compress);
    return compress;
}
