/* * * * *
 *  train.cpp 
 *
 *  This program is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  Copyright (C) 2012, Rongkai Xia, shinekaixia@gmail.com
 *
 * * * * */

// C system / compatibility headers
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
// C++ standard library
#include <algorithm>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>
// project headers
#include "cmdline.h"
#include "TypeDef.h"
#include "ReadData.h"
#include "RandomForestRegressor.h"
#include "RandomForestClassifier.h"
#include "GBMRegressor.h"
#include "GBMClassifier.h"
using namespace std;

#ifdef DEBUG
// Debug-only handler installed for SIGABRT in main():
// report the overflow and terminate with a failure status.
void signalHandler(int sig) {
    (void)sig; // signal number is not needed for the message
    fputs("Overflow detected\n", stdout);
    exit(1);
}
#endif

//----------------------------------------------------------------------
// Default values for every tunable option.  The defaults quoted in the
// *_help strings below must stay in sync with these macros.
//----------------------------------------------------------------------
#define DEFAULT_N_TREES 100
#define DEFAULT_N_JOBS 1
#define DEFAULT_MAX_FEATURES_RATIO 1.0
#define DEFAULT_MIN_SAMPLE_LEAF 5
#define DEFAULT_MAX_DEPTH   4
#define DEFAULT_SUBSAMPLE   1.0
#define DEFAULT_SPLIT_CRITERION CRITERION_MSE
#define DEFAULT_LOSS SQUARE_LOSS
#define DEFAULT_LEARN_RATE 0.1
#define DEFAULT_OOB 0
#define DEFAULT_VERBOSE 0
#define DEFAULT_BOOTSTRAP 1
#define DEFAULT_COMPUTE_IMPORTANCE 0

// Sizes of the parallel name/help arrays declared below.
static int GBM_param_len=11;
static int RF_param_len=11;
static int common_param_len=6;

// Printable names indexed by the criterion/loss codes used elsewhere in
// this file (e.g. split_criterion_str[split_criterion]); index 0 unused.
static std::string split_criterion_str[4]={"","MSE","Entropy","Gini"};
static std::string loss_function_str[3]={"","Square Loss","Deviance"};

// Option names shared by both algorithms (parallel to common_help).
static std::string common_param[6]={
    "alg",
    "train_x_fn",
    "train_y_fn",
    "validation_x_fn",
    "validation_y_fn",
    "model_fn"
};

static std::string common_help[6]={
    "choose algorithm to run TreeEnsemble.now support \"RF\",\"GBM\".",
    "path to the data point file of training data.",
    "path to the target file of training data.",
    "path to the data point file of validation data.(Optional)",
    "path to the target file of validation data.(Optional)",
    "path to save the model file."
};

// GBM-specific option names (parallel to GBM_help).
static std::string GBM_param[11]={"alg", "n_jobs","loss","n_trees","max_depth","min_sample_leaf", "max_features_ratio","subsample","learn_rate","oob","verbose"};

// RF-specific option names (parallel to RF_help).
static std::string RF_param[11]={"alg","n_jobs","split_criterion","n_trees","max_depth","min_sample_leaf", "max_features_ratio","bootstrap","oob","compute_importance","verbose"};

static std::string GBM_help[11]={
    "GBM",
    "the number of jobs for run GBM in parallel.(default=1)",
    "the loss function for GBM.\"sq\"(square loss) for regression.\"deviance\" for classification.(default=\"sq\")",
    "the number of boosting stage for GBM.Multi-class GBM will build n_classes*n_trees trees,otherwise will build n_trees trees.(default=100)",
    "the maximum depth of the individual trees.Tune this parameter for best performance(default=4)",
    "the minimum number of samples required to split a node.if the number of samples in a node is less than or equal to min_sample_leaf,it will be considered as a leaf node.(default=5)",
    "The fraction of features to consider when looking for the best split.0< max_feature_ratio <=1.0.(default=1.0)",
    "the fraction of samples to be used for the individual boosting stage. 0< subsample <=1.0.if subsample<1.0,this result in Stochastic Gradient Boosting.(default=1.0)",
    "learning rate for GBM.(default=0.1)",
    "whether to compute Out-Of-Bag score.Only work for subsample<1.0(default=false)",
    "whether to print information during training.if verbose is greater than 0,it will print an information when every verbose trees are done.(default=0)",
};
static std::string RF_help[11]={
    "RF",
    "the number of jobs for run RF in parallel.(default=1)",
    "the split criterion of RF.\"mse\"(mean square error) for regression.\"gini\" and \"entropy\" for classification.(default=\"mse\")",
    "the number of trees in RF.(default=100)",
    "the maximum depth of the individual trees.if max_depth=0,the forest are full grown until all leaves contain less than or equal to min_sample_leaf samples.(default=0)",
    "the minimum number of samples required to split a node.if the number of samples in a node is less than or equal to min_sample_leaf,it will be considered as a leaf node.(default=5)",
    "the fraction of features to consider when looking for the best split.0<max_feature_ratio<=1.0.(default=1.0)",
    "whether bootstrap samples are used for building trees.(default=true)",
    "whether to compute Out-Of-Bag score.only work for bootstrap=true.(default=false)",
    "whether to compute features importance.(default=false)",
    "whether to print information during training.(default=0)",
};

// Print the full usage text: the required common options followed by the
// optional RF and GBM option sections, each framed by a separator rule.
static void print_help(){
    cmdline cmd;
    const char* const rule="-----------------------------------------------------\n";
    printf("Usage:\n");

    printf("%s", rule);
    printf("Required parameters\n");
    printf("%s", rule);
    cmd.register_help(common_param, common_help, common_param_len);
    cmd.print_help();

    printf("%s", rule);
    printf("Optional parameters for Random Forest\n");
    printf("%s", rule);
    cmd.register_help(RF_param, RF_help, RF_param_len);
    cmd.print_help();

    printf("%s", rule);
    printf("Optional parameters for Gradient Boosting\n");
    printf("%s", rule);
    cmd.register_help(GBM_param, GBM_help, GBM_param_len);
    cmd.print_help();
}

bool run_RF(cmdline& cmd);
bool run_GBM(cmdline& cmd);

/*
 * Program entry point.
 *
 * Parses the command line, seeds the random number generators and
 * dispatches to run_RF() or run_GBM() according to the -alg option
 * (matched case-insensitively).
 *
 * Returns 0 on success, 1 on any argument error or training failure.
 */
int main(int argc, const char * argv[])
{
#ifdef DEBUG
    signal(SIGABRT, &signalHandler);
#endif
    bool run_flag;
    // seed the C PRNG from the clock, then seed the Mersenne Twister
    // with an odd value derived from it
    srand((uint)time(NULL));
    seedMT(2*rand()+1);
    cmdline cmd;
    if (!cmd.parse_cmdline(argc, argv)){
        fprintf(stderr, "Please type -help for help.\n");
        return 1;
    }
    if (cmd.has_param("help")) {
        print_help();
        return 1;
    }
    if (!cmd.has_param("alg")) {
        fprintf(stderr, "Error: -alg must be set.Support algorithm are \"RF\",\"GBM\".\n");
        fprintf(stderr, "Please type -help for help.\n");
        return 1;
    }
    // algorithm name is case-insensitive
    string alg=cmd.get_value("alg");
    transform(alg.begin(), alg.end(), alg.begin(), ::tolower);
    if (alg=="rf") {
        run_flag=run_RF(cmd);
        if (!run_flag) {
            return 1;
        }
    }else if(alg=="gbm"){
        run_flag=run_GBM(cmd);
        if (!run_flag) {
            return 1;
        }
    }else{
        fprintf(stderr, "Error: Unknown algorithm %s.Support algorithm are \"RF\",\"GBM\".\n",alg.c_str());
        fprintf(stderr, "Please type -help for help.\n");
        return 1;
    }
    return 0;
    // NOTE: the unreachable debug scaffolding that used to follow the
    // final return (hard-coded personal file paths, a leaked FILE*,
    // ad-hoc readData dumps) has been removed.
}

/*
 * Train a Random Forest from the parsed command line.
 *
 * Validates every RF option, reads the training set (and the optional
 * validation set), trains a RandomForestRegressor (criterion "mse") or
 * a RandomForestClassifier ("gini"/"entropy"), optionally saves the
 * model to -model_fn, and prints train/validation metrics to stderr.
 *
 * Returns true on success, false on any argument or I/O error.
 */
bool run_RF(cmdline& cmd){
    // every option name that is legal in RF mode (common + RF-specific)
    vector<string> all_parameters(common_param_len+RF_param_len);
    for (int i=0; i<common_param_len; i++) {
        all_parameters[i]=common_param[i];
    }
    for (int j=0; j<RF_param_len; j++) {
        all_parameters[common_param_len+j]=RF_param[j];
    }
    Data train,validation;
    uint n_jobs=DEFAULT_N_JOBS;
    uint n_trees=DEFAULT_N_TREES;
    uint max_depth=FOREST_TREE_MAX_DEPTH;
    uint min_sample_leaf=DEFAULT_MIN_SAMPLE_LEAF;
    REAL max_features_ratio=DEFAULT_MAX_FEATURES_RATIO;
    bool bootstrap=DEFAULT_BOOTSTRAP;
    bool oob=DEFAULT_OOB;
    bool compute_importance=DEFAULT_COMPUTE_IMPORTANCE;
    uint random_seed=0;
    bool verbose=DEFAULT_VERBOSE;
    int split_criterion=DEFAULT_SPLIT_CRITERION;
    string train_x_fn,train_y_fn,val_x_fn,val_y_fn,model_fn;

    // reject any option name that is not in the RF parameter list
    if (!cmd.check_param(&all_parameters[0], RF_param_len+common_param_len) ){
        return false;
    }
    bool error_occur=false;
    char* endptr;
    string param;
    if (!cmd.has_param("train_x_fn")) {
        fprintf(stderr, "Error: -train_x_fn training data point file are required.\n");
        error_occur=true;
    }else {
        train_x_fn=cmd.get_value("train_x_fn");
    }
    if (!cmd.has_param("train_y_fn")) {
        fprintf(stderr, "Error: -train_y_fn training target file are required.\n");
        error_occur=true;
    }else {
        train_y_fn=cmd.get_value("train_y_fn");
    }
    // validation files must be supplied in pairs
    if (cmd.has_param("validation_x_fn") || cmd.has_param("validation_y_fn")) {
        if (!(cmd.has_param("validation_x_fn") && cmd.has_param("validation_y_fn"))) {
            fprintf(stderr, "Error: validation_x_fn and validation_y_fn must be set at the same time.\n");
            error_occur=true;
        }else{
            val_y_fn=cmd.get_value("validation_y_fn");
            val_x_fn=cmd.get_value("validation_x_fn");
        }
    }
    if (cmd.has_param("model_fn")) {
        model_fn=cmd.get_value("model_fn");
        // probe the model path now so a bad path is reported before
        // training instead of after the model has been built
        FILE* fp=fopen(model_fn.c_str(), "w");
        if (!fp) {
            fprintf(stderr, "Error: try create model file %s error,no such directory.Please check your model file path.\n",model_fn.c_str());
            error_occur=true;
        }else {
            fclose(fp); // fix: the probe handle used to leak
        }
    }
    // numeric options: strtol/strtod with full endptr validation so that
    // trailing garbage (e.g. "10x") is rejected, not silently truncated
    if (cmd.has_param("n_jobs")) {
        param=cmd.get_value("n_jobs");
        n_jobs=(uint)strtol(param.c_str(), &endptr, 10);
        if (n_jobs<=0 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -n_jobs must be greater than 0(e.g,-n_jobs 2).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("n_trees")) {
        param=cmd.get_value("n_trees");
        n_trees=(uint)strtol(param.c_str(), &endptr, 10);
        if (n_trees<=0 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -n_trees must be greater than 0(e.g,-n_trees 10).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("split_criterion")) {
        param=cmd.get_value("split_criterion");
        transform(param.begin(), param.end(), param.begin(), ::tolower);
        if (param=="mse") {
            split_criterion=CRITERION_MSE;
        }else if (param=="gini"){
            split_criterion=CRITERION_GINI;
        }else if (param=="entropy") {
            split_criterion=CRITERION_ENTROPY;
        }else {
            fprintf(stderr, "Error: Unknown split criterion \"%s\".Support criterion are \"mse\" for regression,\"gini\",\"entropy\" for classification.\n",param.c_str());
            error_occur=true;
        }
    }
    if (cmd.has_param("max_depth")) {
        param=cmd.get_value("max_depth");
        max_depth=(uint)strtol(param.c_str(), &endptr, 10);
        // max_depth==0 means "fully grown" and is remapped below;
        // only 1 is an invalid depth
        if (max_depth==1 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -max_depth must be greater than 1(e.g,-max_depth 10).\n");
            error_occur=true;
        }
        if (max_depth==0) {
            max_depth=FOREST_TREE_MAX_DEPTH;
        }
    }
    if (cmd.has_param("min_sample_leaf")) {
        param=cmd.get_value("min_sample_leaf");
        min_sample_leaf=(uint)strtol(param.c_str(), &endptr, 10);
        if (min_sample_leaf<1 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -min_sample_leaf must be greater than 0(e.g,-min_sample_leaf 5).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("max_features_ratio")) {
        param=cmd.get_value("max_features_ratio");
        max_features_ratio=strtod(param.c_str(), &endptr);
        if (max_features_ratio<=0.0 || max_features_ratio>1.0 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))) {
            fprintf(stderr, "Error: -max_features_ratio must be in (0,1](e.g,-max_features_ratio 0.5).\n");
            error_occur=true;
        }
    }
    // boolean flags: any non-zero integer enables, 0 disables
    if (cmd.has_param("bootstrap")) {
        param=cmd.get_value("bootstrap");
        int bootstrap_flag=(int)strtol(param.c_str(), &endptr, 10);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -bootstrap must be an integer(e.g,-bootstrap 1).\n");
            error_occur=true;
        }
        bootstrap=(bootstrap_flag!=0);
    }
    if (cmd.has_param("oob")) {
        param=cmd.get_value("oob");
        int oob_flag=(int)strtol(param.c_str(), &endptr, 10);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -oob must be an integer(e.g,-oob 1).\n");
            error_occur=true;
        }
        oob=(oob_flag!=0);
    }
    if (cmd.has_param("compute_importance")) {
        param=cmd.get_value("compute_importance");
        int importance_flag=(int)strtol(param.c_str(), &endptr, 10);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -compute_importance must be an integer(e.g,-compute_importance 1).\n");
            error_occur=true;
        }
        compute_importance=(importance_flag!=0);
    }
    if (cmd.has_param("verbose")) {
        param=cmd.get_value("verbose");
        int verbose_flag=(int)strtol(param.c_str(), &endptr, 10);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -verbose must be an integer(e.g,-verbose 1).\n");
            error_occur=true;
        }
        verbose=(verbose_flag!=0);
    }
    if (error_occur) {
        fprintf(stderr, "Please type -help for help.\n");
        return false;
    }

    // read training data
    fprintf(stderr, "reading train file....");
    if (!readData(train, train_x_fn.c_str(), train_y_fn.c_str())){
        return false;
    }
    fprintf(stderr, "done.\n");
    // read validation data (feature count must match the training set)
    if (cmd.has_param("validation_x_fn")) {
        fprintf(stderr, "reading validation file....");
        if (!readData(validation, val_x_fn.c_str(), val_y_fn.c_str())) {
            return false;
        }
        if (validation.n_features!=train.n_features) {
            fprintf(stderr, "Error: The number of features in train file(%d features) and validation file(%d features) are not consistent.Please check input file.\n",train.n_features,validation.n_features);
            return false;
        }
        fprintf(stderr, "done.\n");
    }
    if (verbose) {
        fprintf(stderr, "=============================================\n");
        if (split_criterion==CRITERION_MSE) {
            fprintf(stderr, "| Using RandomForestRegressor\n");
        }else {
            fprintf(stderr, "| Using RandomForestClassifier\n");
        }
        fprintf(stderr, "| PARAMETERS:\n");
        fprintf(stderr, "| split_criterion=%s\n",split_criterion_str[split_criterion].c_str());
        fprintf(stderr, "| n_jobs=%d\n",n_jobs);
        fprintf(stderr, "| n_trees=%d\n",n_trees);
        fprintf(stderr, "| max_depth=%d\n",max_depth);
        fprintf(stderr, "| min_sample_leaf=%d\n",min_sample_leaf);
        fprintf(stderr, "| max_features_ratio=%lf\n",max_features_ratio);
        fprintf(stderr, "| bootstrap=%d\n",bootstrap);
        fprintf(stderr, "| oob=%d\n",oob);
        fprintf(stderr, "| compute_importance=%d\n",compute_importance);
        fprintf(stderr, "| verbose=%d\n",verbose);
        fprintf(stderr, "| training samples: %d\n",train.n_samples);
        fprintf(stderr, "| training features: %d\n",train.n_features);
        fprintf(stderr, "=============================================\n");
    }
    // train the model; "mse" selects regression, otherwise classification
    if (split_criterion==CRITERION_MSE) {
        RandomForestRegressor* t=new RandomForestRegressor(n_trees, train.n_features, max_depth, min_sample_leaf, max_features_ratio, bootstrap, oob, compute_importance, random_seed, n_jobs, verbose);
        t->build(train.X, train.y, train.n_samples);
        if (cmd.has_param("model_fn")) {
            t->save_model(model_fn.c_str());
        }
        REAL* trn_pred=new REAL[train.n_samples];
        t->predict(train.X, trn_pred, train.n_samples, train.n_features);
        REAL trn_rmse=rmse(trn_pred, train.y, train.n_samples);
        REAL trn_r2=R2(trn_pred, train.y, train.n_samples);
        fprintf(stderr, "Train RMSE=%lf,Correlation Coefficient=%lf.\n",trn_rmse,trn_r2);
        if (cmd.has_param("validation_x_fn")) {
            REAL* pred=new REAL[validation.n_samples];
            t->predict(validation.X, pred, validation.n_samples, validation.n_features);
            REAL val_rmse=rmse(pred, validation.y, validation.n_samples);
            REAL val_r2=R2(pred, validation.y, validation.n_samples);
            fprintf(stderr, "Validation RMSE=%lf,Correlation Coefficient=%lf.\n",val_rmse,val_r2);
            delete[] pred; // fix: was `delete` on a new[] array (UB)
        }
        delete[] trn_pred; // fix: was `delete` on a new[] array (UB)
        delete t;
    }else {
        RandomForestClassifier* t=new RandomForestClassifier(split_criterion, n_trees, train.n_features, max_depth, min_sample_leaf, max_features_ratio, bootstrap, oob, compute_importance, random_seed, n_jobs, verbose);
        t->build(train.X, train.y, train.n_samples);
        if (cmd.has_param("model_fn")) {
            t->save_model(model_fn.c_str());
        }
        REAL* trn_pred=new REAL[train.n_samples];
        t->predict(train.X, trn_pred, train.n_samples, train.n_features);
        REAL trn_rmse=rmse(trn_pred, train.y, train.n_samples);
        REAL trn_acc=Accuracy(train.y, trn_pred, train.n_samples);
        fprintf(stderr, "Train RMSE=%lf,Acc=%lf.\n",trn_rmse,trn_acc);
        if (cmd.has_param("validation_x_fn")) {
            REAL* pred=new REAL[validation.n_samples];
            t->predict(validation.X, pred, validation.n_samples, validation.n_features);
            REAL val_rmse=rmse(pred, validation.y, validation.n_samples);
            REAL val_acc=Accuracy(validation.y, pred, validation.n_samples);
            fprintf(stderr, "Validation RMSE=%lf,Acc=%lf.\n",val_rmse,val_acc);
            delete[] pred; // fix: was `delete` on a new[] array (UB)
        }
        delete[] trn_pred; // fix: was `delete` on a new[] array (UB)
        delete t;
    }
    return true;
}


/*
 * Train a Gradient Boosting Machine from the parsed command line.
 *
 * Validates every GBM option, reads the training set (and the optional
 * validation set), trains a GBMRegressor (loss "sq") or a GBMClassifier
 * ("deviance"), optionally saves the model to -model_fn, and prints
 * train/validation metrics to stderr.  When validation data is given
 * it is also passed to build() so training can monitor it.
 *
 * Returns true on success, false on any argument or I/O error.
 */
bool run_GBM(cmdline& cmd){
    // every option name that is legal in GBM mode (common + GBM-specific)
    vector<string> all_parameters(common_param_len+GBM_param_len);
    for (int i=0; i<common_param_len; i++) {
        all_parameters[i]=common_param[i];
    }
    for (int j=0; j<GBM_param_len; j++) {
        all_parameters[common_param_len+j]=GBM_param[j];
    }
    Data train,validation;
    uint n_jobs=DEFAULT_N_JOBS;
    int loss_function=DEFAULT_LOSS;
    uint n_trees=DEFAULT_N_TREES;
    uint max_depth=DEFAULT_MAX_DEPTH;
    uint min_sample_leaf=DEFAULT_MIN_SAMPLE_LEAF;
    REAL max_features_ratio=DEFAULT_MAX_FEATURES_RATIO;
    REAL learn_rate=DEFAULT_LEARN_RATE;
    REAL subsample=DEFAULT_SUBSAMPLE;
    bool oob=DEFAULT_OOB;
    uint random_seed=0;
    int verbose=DEFAULT_VERBOSE; // int: print progress every `verbose` trees
    bool compute_importance=false; // no CLI option for GBM importances
    string train_x_fn,train_y_fn,val_x_fn,val_y_fn,model_fn;

    // reject any option name that is not in the GBM parameter list
    if (!cmd.check_param(&all_parameters[0], GBM_param_len+common_param_len) ){
        return false;
    }
    bool error_occur=false;
    char* endptr;
    string param;
    if (!cmd.has_param("train_x_fn")) {
        fprintf(stderr, "Error: -train_x_fn training data point file are required.\n");
        error_occur=true;
    }else {
        train_x_fn=cmd.get_value("train_x_fn");
    }
    if (!cmd.has_param("train_y_fn")) {
        fprintf(stderr, "Error: -train_y_fn training target file are required.\n");
        error_occur=true;
    }else {
        train_y_fn=cmd.get_value("train_y_fn");
    }
    // validation files must be supplied in pairs
    if (cmd.has_param("validation_x_fn") || cmd.has_param("validation_y_fn")) {
        if (!(cmd.has_param("validation_x_fn") && cmd.has_param("validation_y_fn"))) {
            fprintf(stderr, "Error: validation_x_fn and validation_y_fn must be set at the same time.\n");
            error_occur=true;
        }else{
            val_y_fn=cmd.get_value("validation_y_fn");
            val_x_fn=cmd.get_value("validation_x_fn");
        }
    }
    if (cmd.has_param("model_fn")) {
        model_fn=cmd.get_value("model_fn");
        // probe the model path now so a bad path is reported before
        // training instead of after the model has been built
        FILE* fp=fopen(model_fn.c_str(), "w");
        if (!fp) {
            fprintf(stderr, "Error: try create model file %s error,no such directory.Please check your model file path.\n",model_fn.c_str());
            error_occur=true;
        }else {
            fclose(fp); // fix: the probe handle used to leak
        }
    }
    // numeric options: strtol/strtod with full endptr validation so that
    // trailing garbage (e.g. "10x") is rejected, not silently truncated
    if (cmd.has_param("n_jobs")) {
        param=cmd.get_value("n_jobs");
        n_jobs=(uint)strtol(param.c_str(), &endptr, 10);
        if (n_jobs<=0 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -n_jobs must be greater than 0(e.g,-n_jobs 2).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("n_trees")) {
        param=cmd.get_value("n_trees");
        n_trees=(uint)strtol(param.c_str(), &endptr, 10);
        if (n_trees<=0 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -n_trees must be greater than 0(e.g,-n_trees 10).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("loss")) {
        param=cmd.get_value("loss");
        transform(param.begin(), param.end(), param.begin(), ::tolower);
        if (param=="sq") {
            loss_function=SQUARE_LOSS;
        }else if (param=="deviance"){
            loss_function=MULTINOMIAL_DEVIANCE;//for classification,loss_function will be set in build() function.
        }else {
            fprintf(stderr, "Error: Unknown loss function \"%s\".Support loss are \"sq\"(square loss) for regression,\"deviance\" for classification.\n",param.c_str());
            error_occur=true;
        }
    }
    if (cmd.has_param("max_depth")) {
        param=cmd.get_value("max_depth");
        max_depth=(uint)strtol(param.c_str(), &endptr, 10);
        if (max_depth<=1 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -max_depth must be greater than 1(e.g,-max_depth 10).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("min_sample_leaf")) {
        param=cmd.get_value("min_sample_leaf");
        min_sample_leaf=(uint)strtol(param.c_str(), &endptr, 10);
        if (min_sample_leaf<1 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -min_sample_leaf must be greater than 0(e.g,-min_sample_leaf 5).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("max_features_ratio")) {
        param=cmd.get_value("max_features_ratio");
        max_features_ratio=strtod(param.c_str(), &endptr);
        if (max_features_ratio<=0.0 || max_features_ratio>1.0 || endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))) {
            fprintf(stderr, "Error: -max_features_ratio must be in (0,1](e.g,-max_features_ratio 0.5).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("learn_rate")) {
        param=cmd.get_value("learn_rate");
        learn_rate=strtod(param.c_str(), &endptr);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr)) || learn_rate<=0.0 || learn_rate>=1.0){
            fprintf(stderr, "Error: -learn_rate must be in (0,1)(e.g,-learn_rate 0.1).\n");
            error_occur=true;
        }
    }
    if (cmd.has_param("subsample")) {
        param=cmd.get_value("subsample");
        subsample=strtod(param.c_str(), &endptr);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr)) || subsample<=0.0 || subsample>1.0){
            fprintf(stderr, "Error: -subsample must be in (0,1](e.g,-subsample 0.5).\n");
            error_occur=true;
        }
    }
    // boolean flag: any non-zero integer enables, 0 disables
    if (cmd.has_param("oob")) {
        param=cmd.get_value("oob");
        int oob_flag=(int)strtol(param.c_str(), &endptr, 10);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -oob must be an integer(e.g,-oob 1).\n");
            error_occur=true;
        }
        oob=(oob_flag!=0);
    }
    if (cmd.has_param("verbose")) {
        param=cmd.get_value("verbose");
        verbose=(int)strtol(param.c_str(), &endptr, 10);
        if (endptr == param.c_str() || (*endptr != '\0' && !isspace(*endptr))){
            fprintf(stderr, "Error: -verbose must be an integer(e.g,-verbose 1).\n");
            error_occur=true;
        }
    }
    if (error_occur) {
        fprintf(stderr, "Please type -help for help.\n");
        return false;
    }

    // read training data
    fprintf(stderr, "reading train file....");
    if (!readData(train, train_x_fn.c_str(), train_y_fn.c_str())){
        return false;
    }
    fprintf(stderr, "done.\n");
    // read validation data (feature count must match the training set)
    if (cmd.has_param("validation_x_fn")) {
        fprintf(stderr, "reading validation file....");
        if (!readData(validation, val_x_fn.c_str(), val_y_fn.c_str())) {
            return false;
        }
        if (validation.n_features!=train.n_features) {
            fprintf(stderr, "Error: The number of features in train file(%d features) and validation file(%d features) are not consistent.Please check input file.\n",train.n_features,validation.n_features);
            return false;
        }
        fprintf(stderr, "done.\n");
    }
    if (verbose) {
        fprintf(stderr, "=============================================\n");
        if (loss_function==SQUARE_LOSS) {
            fprintf(stderr, "| Using GBMRegressor\n");
        }else {
            fprintf(stderr, "| Using GBMClassifier\n");
        }
        fprintf(stderr, "| PARAMETERS:\n");
        fprintf(stderr, "| loss=%s\n",loss_function_str[loss_function].c_str());
        fprintf(stderr, "| n_jobs=%d\n",n_jobs);
        fprintf(stderr, "| n_trees=%d\n",n_trees);
        fprintf(stderr, "| max_depth=%d\n",max_depth);
        fprintf(stderr, "| min_sample_leaf=%d\n",min_sample_leaf);
        fprintf(stderr, "| max_features_ratio=%lf\n",max_features_ratio);
        fprintf(stderr, "| learn_rate=%lf\n",learn_rate);
        fprintf(stderr, "| subsample=%lf\n",subsample);
        fprintf(stderr, "| oob=%d\n",oob);
        fprintf(stderr, "| verbose=%d\n",verbose);
        fprintf(stderr, "| training samples: %d\n",train.n_samples);
        fprintf(stderr, "| training features: %d\n",train.n_features);
        fprintf(stderr, "=============================================\n");
    }
    // train the model; "sq" selects regression, otherwise classification
    if (loss_function==SQUARE_LOSS) {
        GBMRegressor* t=new GBMRegressor(loss_function, n_trees, train.n_features, max_depth, min_sample_leaf, max_features_ratio, subsample, learn_rate, oob, compute_importance, random_seed, n_jobs, verbose);
        if (cmd.has_param("validation_x_fn")) {
            t->build(train.X, train.y, train.n_samples,validation.X,validation.y,validation.n_samples);
        }else{
            t->build(train.X, train.y, train.n_samples);
        }
        if (cmd.has_param("model_fn")) {
            t->save_model(model_fn.c_str());
        }
        REAL* trn_pred=new REAL[train.n_samples];
        t->predict(train.X, trn_pred, train.n_samples, train.n_features);
        REAL trn_rmse=rmse(trn_pred, train.y, train.n_samples);
        REAL trn_r2=R2(trn_pred, train.y, train.n_samples);
        fprintf(stderr, "Train RMSE=%lf,Correlation Coefficient=%lf.\n",trn_rmse,trn_r2);
        if (cmd.has_param("validation_x_fn")) {
            REAL* pred=new REAL[validation.n_samples];
            t->predict(validation.X, pred, validation.n_samples, validation.n_features);
            REAL val_rmse=rmse(pred, validation.y, validation.n_samples);
            REAL val_r2=R2(pred, validation.y, validation.n_samples);
            fprintf(stderr, "Validation RMSE=%lf,Correlation Coefficient=%lf.\n",val_rmse,val_r2);
            delete[] pred; // fix: was `delete` on a new[] array (UB)
        }
        delete[] trn_pred; // fix: was `delete` on a new[] array (UB)
        delete t;
    }else {
        GBMClassifier* t=new GBMClassifier(loss_function, n_trees, train.n_features, max_depth, min_sample_leaf, max_features_ratio, subsample, learn_rate, oob, compute_importance, random_seed, n_jobs, verbose);
        if (cmd.has_param("validation_x_fn")) {
            t->build(train.X, train.y, train.n_samples,validation.X,validation.y,validation.n_samples);
        }else{
            t->build(train.X, train.y, train.n_samples);
        }
        if (cmd.has_param("model_fn")) {
            t->save_model(model_fn.c_str());
        }
        REAL* trn_pred=new REAL[train.n_samples];
        t->predict(train.X, trn_pred, train.n_samples, train.n_features);
        REAL trn_rmse=rmse(trn_pred, train.y, train.n_samples);
        REAL trn_acc=Accuracy(train.y, trn_pred, train.n_samples);
        fprintf(stderr, "Train RMSE=%lf,Acc=%lf.\n",trn_rmse,trn_acc);
        if (cmd.has_param("validation_x_fn")) {
            REAL* pred=new REAL[validation.n_samples];
            t->predict(validation.X, pred, validation.n_samples, validation.n_features);
            REAL val_rmse=rmse(pred, validation.y, validation.n_samples);
            REAL val_acc=Accuracy(validation.y, pred, validation.n_samples);
            fprintf(stderr, "Validation RMSE=%lf,Acc=%lf.\n",val_rmse,val_acc);
            delete[] pred; // fix: was `delete` on a new[] array (UB)
        }
        delete[] trn_pred; // fix: was `delete` on a new[] array (UB)
        delete t;
    }
    return true;
}