// LRReweight.cpp : Defines the entry point for the console application.
//

// Note: trivial edit made only to test SVN integration in Xcode.
#include "stdio.h"
#include "rootNode.h"
#include "projector.h"
#include "utility.h"
#include "visualizer.h"

#include <memory.h>


#include "liblinear/linear.h"

#include "libsvm/svm.h"


//#include <google/profiler.h>

#include <omp.h>

/*
 * One training sample: a non-owning pointer to a dense feature vector of
 * length n_dim plus the classifier score most recently assigned to it.
 * Whoever pushes a feature_s into a container owns p_feature and must
 * delete[] it.
 */
struct feature_s{
public:
	float* p_feature;
	float score;
	/* Higher score compares "less", so std::list::sort produces a
	 * DESCENDING ranking (hardest negatives first).  Made const-correct:
	 * the original took a non-const reference and was a non-const member,
	 * which broke comparison of const objects. */
	bool operator<(const feature_s &b) const {
		return this->score > b.score;
	}
};



bool runLR(std::list<feature_s> &posSet, std::list<feature_s> &negSet, int n_dim, float *weight, float &bias){
	int n_pos = posSet.size();
	int n_neg = negSet.size();

	struct parameter param;
	struct problem prob;
	struct model* model_;
	param.solver_type = L2R_LR;/*L2 reg logistic regression primal*/
//	param.solver_type = L2R_L2LOSS_SVC;
	param.C = 1e-4;/*cost C (default 1)*/
	param.eps = 1e-5; // tolerance of termination criterion
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;

	prob.bias  = 1;
	prob.l = n_pos + n_neg;
	prob.n = n_dim + 1;
	prob.x = (feature_node**)malloc(sizeof(feature_node*)*prob.l);
	prob.y = (int*)malloc(sizeof(int)*prob.l);

	feature_node* x_space = (feature_node*)malloc(sizeof(feature_node)*(prob.l*(prob.n+1))); /*here we use dense matrix*/ 



	/*fill in the positive features*/
	int off_set =0;
	std::list<feature_s>::iterator itr = posSet.begin();
	for(int i_pos = 0;i_pos<n_pos; ++i_pos){
		off_set= i_pos*(n_dim+2);/*one for bias, one for the end of feature signature(by setting index to -1)*/
		prob.x[i_pos]=&(x_space[off_set]);
		prob.y[i_pos]=1;
		for(int i_dim =0; i_dim<n_dim; ++i_dim){
			x_space[off_set+i_dim].index = i_dim+1;
			x_space[off_set+i_dim].value=(double)(itr->p_feature[i_dim]);
		}
		x_space[off_set + n_dim].index = n_dim+1;
		x_space[off_set + n_dim].value = 1;/*the bias term*/
		x_space[off_set + n_dim + 1].index = -1;/*end of feature*/
		++itr;
	}
	/*fill in negative feature*/
	itr = negSet.begin();
	for(int i_neg = 0;i_neg<n_neg; ++i_neg){
		off_set= (n_pos+i_neg)*(n_dim+2);
		prob.x[n_pos+i_neg]=&(x_space[off_set]);
		prob.y[n_pos+i_neg]=-1;
		for(int i_dim =0; i_dim<n_dim; ++i_dim){
			x_space[off_set+i_dim].index = i_dim+1;
			x_space[off_set+i_dim].value=(double)(itr->p_feature[i_dim]);
		}
		x_space[off_set + n_dim].index = n_dim+1;
		x_space[off_set + n_dim].value = 1;/*the bias term*/
		x_space[off_set + n_dim +1].index = -1; /*end of feature*/
		++itr;
		//if(itr==negSet.end()){
		//	printf("error:: reached the end of negSet \n");
		//	break;
		//}
	}

	/*cal Logistic Regression to reweight*/
	model_=train(&prob, &param);

	for(int i_dim = 0; i_dim<n_dim; ++i_dim){
		weight[i_dim]=(float)(model_->w[i_dim]*1e1);
	}
	bias = model_->w[n_dim]*1e1;

	free_and_destroy_model(&model_);
	printf("finished fitting model\n");

	free(prob.x);
	free(prob.y);
	free(x_space);
	return true;
}




/*
 * Trains a linear C-SVC (libsvm) on posSet (+1) and negSet (-1), then folds
 * the support vectors into an explicit primal weight vector so the caller
 * can score samples with a plain dot product.
 *
 * posSet, negSet : dense feature vectors of length n_dim (not modified)
 * n_dim          : feature dimensionality
 * weight         : out, caller-allocated array of n_dim floats
 * bias           : out, rho-derived bias term
 * Returns false on allocation failure or invalid SVM parameters.
 */
bool runSVM(std::list<feature_s> &posSet, std::list<feature_s> &negSet, int n_dim, float *weight, float &bias){
	int n_pos = posSet.size();
	int n_neg = negSet.size();

	struct svm_parameter param;
	struct svm_problem prob;
	struct svm_model* model_;

	/*initialize EVERY field: svm_train reads cache_size/shrinking/probability
	  etc., and the original left them uninitialized (undefined behavior)*/
	param.svm_type = C_SVC;
	param.kernel_type = 0;/*linear kernel, so w is recoverable from the SVs*/
	param.degree = 3;/*unused by the linear kernel but must not be garbage*/
	param.gamma = 0;
	param.coef0 = 0;
	param.C = 1e-4;
	param.eps = 1e-5;
	param.nu = 0.5;
	param.p = 0.1;
	param.cache_size = 100;/*MB*/
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight = NULL;
	param.weight_label = NULL;

	prob.l = n_pos + n_neg;
	prob.x = (svm_node**)malloc(sizeof(svm_node*)*prob.l);
	prob.y = (double*)malloc(sizeof(double)*prob.l);

	/*dense layout: each sample takes n_dim values + one end marker*/
	svm_node* x_space = (svm_node*)malloc(sizeof(svm_node)*(prob.l*(n_dim+1)));
	if(prob.x==NULL || prob.y==NULL || x_space==NULL){
		printf("runSVM: failed to allocate training buffers\n");
		free(prob.x);
		free(prob.y);
		free(x_space);
		return false;
	}

	/*fill in the positive features*/
	int off_set =0;
	std::list<feature_s>::iterator itr = posSet.begin();
	for(int i_pos = 0;i_pos<n_pos; ++i_pos){
		off_set= i_pos*(n_dim+1);/*one extra slot for the index==-1 terminator*/
		prob.x[i_pos]=&(x_space[off_set]);
		prob.y[i_pos]=1;
		for(int i_dim =0; i_dim<n_dim; ++i_dim){
			x_space[off_set+i_dim].index = i_dim+1;/*libsvm indices are 1-based*/
			x_space[off_set+i_dim].value=(double)(itr->p_feature[i_dim]);
		}
		x_space[off_set + n_dim].index = -1;/*end of feature*/
		++itr;
	}
	/*fill in negative feature*/
	itr = negSet.begin();
	for(int i_neg = 0;i_neg<n_neg; ++i_neg){
		off_set= (n_pos+i_neg)*(n_dim+1);
		prob.x[n_pos+i_neg]=&(x_space[off_set]);
		prob.y[n_pos+i_neg]=-1;
		for(int i_dim =0; i_dim<n_dim; ++i_dim){
			x_space[off_set+i_dim].index = i_dim+1;
			x_space[off_set+i_dim].value=(double)(itr->p_feature[i_dim]);
		}
		x_space[off_set + n_dim].index = -1; /*end of feature*/
		++itr;
	}

	/*validate before training: the original discarded this return value,
	  but svm_check_parameter reports problems via its result string*/
	const char* param_err = svm_check_parameter(&prob, &param);
	if(param_err != NULL){
		printf("runSVM: invalid SVM parameters: %s\n", param_err);
		free(prob.x);
		free(prob.y);
		free(x_space);
		return false;
	}
	model_=svm_train(&prob, &param);

	/*collapse the SVs into a primal weight vector: w = sum_i coef_i * SV_i*/
	double* w = (double*)calloc(n_dim, sizeof(double));
	if(w==NULL){
		printf("runSVM: failed to allocate weight accumulator\n");
		svm_free_and_destroy_model(&model_);
		free(prob.x);
		free(prob.y);
		free(x_space);
		return false;
	}
	int i_dim;
	for(int i_SV=0;i_SV<model_->l; ++i_SV){
		printf("proceeded to %d th SV,total %d\n",i_SV,model_->l);
		double coef = model_->sv_coef[0][i_SV];
		svm_node *p_node = model_->SV[i_SV];
		while(p_node->index!=-1){
			i_dim=p_node->index-1;/*back to 0-based*/
			w[i_dim]+=p_node->value*coef;
			++p_node;
		}
	}
	/*scale by 10 on export, matching runLR's convention*/
	for(i_dim =0; i_dim<n_dim;++i_dim){
		weight[i_dim] = (float)(w[i_dim]*10);
	}

	/*NOTE(review): libsvm's decision function is w.x - rho; exporting +rho
	  as the bias matches the original code, but confirm the caller's sign
	  convention before relying on it*/
	bias = (float)(model_->rho[0]*10);

	svm_free_and_destroy_model(&model_);
	printf("finished fitting model\n");

	free(prob.x);
	free(prob.y);
	free(x_space);
	free(w);
	return true;
}




/*
 * Hard-negative-mining training loop.  Repeatedly: (1) scores every sample
 * with the current weights, (2) reports false-positive/false-negative rates,
 * (3) moves the highest-scoring negatives from negPool into the active
 * negSet, (4) retrains (currently via runSVM).  Stops when negSet is large
 * enough (300x the positives) or the pool is exhausted.
 *
 * posSet  : positive features (scores are updated in place)
 * negPool : candidate negatives; mined entries are MOVED into negSet and
 *           their p_feature buffers freed here before returning
 * n_dim   : feature dimensionality
 * weight  : in/out, n_dim floats; reset to all-ones, refined each round,
 *           and checkpointed to "weighting.w" every iteration
 * bias    : in/out; only meaningful after the first training round (the
 *           status line prints the caller-supplied value on round one)
 */
bool LRWithMining(std::list<feature_s> &posSet, std::list<feature_s> &negPool, int n_dim, float *weight, float &bias){

	/*cast: list::size() is size_t, which %d cannot portably print*/
	printf("negPool Size: %d\n",(int)negPool.size());
	std::list<feature_s> negSet;
	/*initial weight: all ones*/
	int i_itr=0;
	for(int i_dim= 0; i_dim<n_dim; ++i_dim) weight[i_dim]=1.0f;
	float score, min_pos_score,max_pos_score, max_neg_score;
	while(1){
		printf("ranking neg feature pool\n");
		/*run on pos and neg to get a fresh performance estimate.
		  NEGMAX is assumed to be a large NEGATIVE sentinel (so -NEGMAX is a
		  large positive one) — TODO confirm against its definition*/
		min_pos_score= -NEGMAX;
		max_pos_score = NEGMAX;
		max_neg_score= NEGMAX;
		for( std::list<feature_s>::iterator itr = posSet.begin();
			itr!=posSet.end(); ++itr){
				score =0;
				for(int i_dim= 0; i_dim<n_dim; ++i_dim){
					score += weight[i_dim]*itr->p_feature[i_dim];
				}
				itr->score = score;
				min_pos_score = std::min(min_pos_score,score);
				max_pos_score = std::max(max_pos_score,score);
		}
		/*a negative counts as a false positive if it outscores the worst positive*/
		float fp = 0;
		for( std::list<feature_s>::iterator itr = negPool.begin();
			itr!=negPool.end(); ++itr){
				score =0;
				for(int i_dim= 0; i_dim<n_dim; ++i_dim){
					score += weight[i_dim]*itr->p_feature[i_dim];
				}
				itr->score = score;
				if(score>min_pos_score){
					++fp;
				}
				max_neg_score = std::max(max_neg_score,score);
		}
		for( std::list<feature_s>::iterator itr = negSet.begin();
			itr!=negSet.end(); ++itr){
				score =0;
				for(int i_dim= 0; i_dim<n_dim; ++i_dim){
					score += weight[i_dim]*itr->p_feature[i_dim];
				}
				itr->score = score;
				if(score>min_pos_score){
					++fp;
				}
				max_neg_score = std::max(max_neg_score,score);
		}
		fp = fp/ (negPool.size()+negSet.size());
		/*a positive counts as a false negative if some negative outscores it*/
		float fn =0;
		for( std::list<feature_s>::iterator itr = posSet.begin();
			itr!=posSet.end(); ++itr){
				if(itr->score<max_neg_score){
					++fn;
				}
		}
		fn = fn/posSet.size();

		printf("negSet size %d, min_pos_score %f, max_pos_score: %f, max_neg_score %f, bias %f, fp rate %f, fn rate: %f\n", (int)negSet.size(),min_pos_score,max_pos_score,max_neg_score, bias,fp,fn);
		/*checkpoint the current weights to weighting.w*/
		FILE *pf_weight =fopen("weighting.w","wb");
		if(pf_weight!=NULL){
			fwrite(weight,sizeof(float),n_dim,pf_weight);
			fclose(pf_weight);
		}
		else{
			printf("failed to output weighing.w\n");
		}
		printf("finished outputing w\n");
		/*stop once the active negative set is 300x the positives
		  (size_t comparison avoids the original signed/unsigned mix)*/
		if(negSet.size() > 300*posSet.size()) break;
		/*move the hardest (highest-scoring) negatives into negSet;
		  feature_s sorts descending by score*/
		negPool.sort();
		int i_inst = 0;
		for(; i_inst<200000; ++i_inst){
			/*must test BEFORE front(): front() on an empty list is UB
			  (the original called front() first)*/
			if(negPool.empty()){
				printf("pool empty\n");
				break;
			}
			negSet.push_back(negPool.front());
			negPool.pop_front();
		}
		/*check exit condition*/
		printf("%d new neg features added to negSet\n",i_inst);
		if(i_inst==0){
			break;
		}
		/*retrain (runLR is the liblinear alternative)*/
		//runLR(posSet,negSet,n_dim,weight,bias);
		runSVM(posSet,negSet,n_dim,weight,bias);
		++i_itr;

		/*dump the active negative set for offline inspection*/
		printf("output negSet\n");
		FILE* p_file= fopen("negSet.f","wb");
		if(p_file!=NULL){
			int n_neg_set = (int)negSet.size();
			fwrite(&n_neg_set,sizeof(int),1,p_file);
			for(std::list<feature_s>::iterator itr = negSet.begin(); itr!=negSet.end();++itr){
				fwrite(itr->p_feature,sizeof(float),n_dim,p_file);
			}
			fclose(p_file);
		}
		else{
			printf("failed to output negSet.f\n");
		}
	}
	/*free the feature buffers that were moved out of negPool into negSet;
	  posSet and the remainder of negPool stay owned by the caller*/
	for(std::list<feature_s>::iterator itr = negSet.begin(); itr!=negSet.end(); ++itr){
		delete[] itr->p_feature;
	}

	return true;
}




int main(int argc, char *argv[]){

	/*input path parameters*/
	
	const char* model_path_name = "../../results";
	const char* pos_file_name = "./train_pos_file_list.txt";
	const char* neg_file_name = "./train_pos_file_list.txt";
	//const char* weight_file_name= "/home/wzhu/Documents/CarAoT/testCode/detection/weighting.w";	
	const char* weight_file_name = NULL;
		

	/*		
	const char* model_path_name = "D:/Projects/AoT/results";
	const char* pos_file_name = "D:/Projects/AoT/testCode/detection/train_pos_file_list.txt";
	const char* neg_file_name = "D:/Projects/AoT/testCode/detection/train_neg_file_list.txt";
	//const char* weight_file_name = "D:/Projects/AoT/testCode/detection/weighting.w";
	const char* weight_file_name = NULL;
	*/

	/*initial necessary memories*/
	std::list<feature_s> negPool;
	std::list<feature_s> posFeature;
	char img_name[500];

	visualizer window("main");
	int n_pos=0;
	int neg_pool_size=0;
	if(true){
		/*load the un-trained model*/
		rootNode node_root;
		std::vector<int> node_id; 
		for(int i =1; i<=12; ++i)	node_id.push_back(i);
		node_root.initialRootNode(model_path_name,node_id,weight_file_name);
		int n_dim = node_root.countFeatureDimension();

		/*process positive training images*/
		int n_file;
		FILE *p_file = fopen(pos_file_name,"r");
		if(p_file ==NULL){
			printf("Failed to open file %s \n", pos_file_name);
			return -1;
		}


		fscanf(p_file,"%d\n",&n_file);
		printf("%d files to process\n",n_file);
		for(int i_file = 0; i_file<n_file; ++i_file){
			if(feof(p_file)){
				printf("error: unexpected end of file\n");
				return -1;
			}

			fscanf(p_file,"%s\n",&img_name[0]);
			printf("processing file %s \n",img_name);
			/*read view from Mat file*/
			rootParticle_s root_particle;
			view_s view= utility::loadViewData(img_name);
			memcpy((void*)root_particle.P[0],(void*)view.P[0],sizeof(float)*12);
			for(int col = 0; col<4; ++col){
				view.P[0][col]*=view.zy;
				view.P[1][col]*=view.zx;
			}
			memcpy((void*)root_particle.cam_dir,(void*)view.cam_dir,sizeof(float)*3);
			node_root.loadImageToModel(img_name,"train_");
			
			node_root.test(view.P,view.cam_dir,root_particle);
	
                        char template_name[500];
			sprintf(template_name,"%s_template.bmp",img_name);
			printf("template name: %s\n",template_name);		
		        node_root.argMax(root_particle,const_cast<const char*>(&template_name[0]));

			printf("export complete\n");
			float *p_feature =new  float[n_dim];

			memset((void*)p_feature,0,sizeof(float)*n_dim);
			node_root.alignFeature(root_particle,p_feature);
			node_root.clearImageInModel();
			feature_s tmp_feature;
			tmp_feature.p_feature = p_feature;
			posFeature.push_back(tmp_feature);
		}
		fclose(p_file);

		printf("writting pos feature to pos_features.f\n");
                n_pos = posFeature.size();
                FILE* p_feature_file = fopen("pos_features.f","wb");
                fwrite(&n_dim,sizeof(int),1,p_feature_file);
                fwrite(&n_pos,sizeof(int),1,p_feature_file);
                for(std::list<feature_s>::iterator itr = posFeature.begin();
                        itr!=posFeature.end(); ++itr){
                                fwrite(itr->p_feature,sizeof(float),n_dim,p_feature_file);
                }
		fclose(p_feature_file);


		/*extract negative training data*/
		p_file = fopen(neg_file_name,"r");
		if(p_file ==NULL){
			printf("Failed to open file %s \n", neg_file_name);
			return -1;
		}

		fscanf(p_file,"%d\n",&n_file);
		printf("%d files to process\n",n_file);
		for(int i_file = 0; i_file<n_file; ++i_file){
			if(feof(p_file)){
				printf("error: unexpected end of file\n");
				return -1;
			}

			fscanf(p_file,"%s\n",&img_name[0]);
			printf("processing file %s \n",img_name);
			//if(i_file%4 !=0) {
			//	printf("File discarded for speed consideration\n");
			//	continue;
			//}
			
			std::deque<rootParticle_s> candidates;
			node_root.cropHardNegExample(img_name,candidates);
			//node_root.randomCollectNegExample(img_name,20,candidates);
			while(!candidates.empty()){
				float *p_feature = new float[n_dim];
				feature_s tmp_feature;
				tmp_feature.p_feature = p_feature;
				memset((void*)p_feature,0,sizeof(float)*n_dim);
				node_root.alignFeature(candidates.front(),p_feature);
				negPool.push_back(tmp_feature);
				candidates.pop_front();
			}

		}
		fclose(p_file);

		/*save feature to a file*/
		printf("writting to features.f\n");
		n_pos = posFeature.size();
		p_feature_file = fopen("features.f","wb");
		fwrite(&n_dim,sizeof(int),1,p_feature_file);
		fwrite(&n_pos,sizeof(int),1,p_feature_file);
		for(std::list<feature_s>::iterator itr = posFeature.begin(); 
			itr!=posFeature.end(); ++itr){
				fwrite(itr->p_feature,sizeof(float),n_dim,p_feature_file);
		}

		neg_pool_size = negPool.size();
		fwrite(&neg_pool_size,sizeof(int),1,p_feature_file);
		for(std::list<feature_s>::iterator itr = negPool.begin(); 
			itr!=negPool.end(); ++itr){
				fwrite(itr->p_feature,sizeof(float),n_dim,p_feature_file);
		}
		fclose(p_feature_file);

		printf("output complete\n");
		/*clear memory*/
		node_root.release();
		for(std::list<feature_s>::iterator itr = posFeature.begin(); 
			itr!=posFeature.end(); ++itr){
				delete[] (itr->p_feature);
		}
		printf("finished freeing pos feature\n");
		posFeature.clear();
		printf("posFeature cleared\n");
		for(std::list<feature_s>::iterator itr = negPool.begin();
			itr!=negPool.end(); ++itr){
				delete[] (itr->p_feature);
		}
		printf("finished clearing negPool\n");
		negPool.clear();
	}/*true or false*/

	/*load the feature file*/
	int n_dim;
	FILE* p_feature_in = fopen("features.f","rb");
	fread(&n_dim,sizeof(int),1,p_feature_in);
	fread(&n_pos,sizeof(int),1,p_feature_in);
	for(int i_pos = 0; i_pos<n_pos; ++i_pos){
		feature_s feature;
		posFeature.push_back(feature);
		posFeature.back().p_feature = new float[n_dim];
		fread(posFeature.back().p_feature,sizeof(float),n_dim,p_feature_in);
	}

	fread(&neg_pool_size,sizeof(int),1,p_feature_in);

	for(int i_neg = 0; i_neg<neg_pool_size; ++i_neg){
		feature_s feature;
		negPool.push_back(feature);
		negPool.back().p_feature = new float[n_dim];
		fread(negPool.back().p_feature,sizeof(float),n_dim,p_feature_in);
	}
	fclose(p_feature_in);

	/*call the LR regression*/
	float *weight = new float[n_dim];
	float bias;
	LRWithMining(posFeature,negPool,n_dim,weight,bias);

	/*clear memory*/
	for(std::list<feature_s>::iterator itr = posFeature.begin(); 
		itr!=posFeature.end(); ++itr){
			delete[] itr->p_feature;
	}

	for(std::list<feature_s>::iterator itr = negPool.begin();
		itr!=negPool.end(); ++itr){
			delete[] itr->p_feature;
	}


	return 1;
}




