/*
 * Relation.cpp
 *
 *  Created on: May 26, 2012
 *      Author: Romeo
 */

#include "Relation.h"
#include <sys/time.h>
using namespace std;


extern void find_NEs(const ME_Model & me,Sentence & s);
extern void
bidir_decode_beam(vector<Token> & vt,
		const multimap<string, string> & tag_dictionary,
		const vector<ME_Model> & vme);
void stemSentence(Sentence *s);
extern void bidir_chunking(vector<Sentence> & vs, const vector<ME_Model> & vme);

void
bidir_chuning_decode_beam(vector<Token> & vt,
		const vector<ME_Model> & vme);

// Default constructor.
// NOTE(review): all members (the my_sentences pointer, the begin/end
// entity indices, the string fields) are left default/uninitialized here;
// callers are expected to fill them in before use -- confirm.
Relation::Relation() {
	//NumberOfFeatures=0;
	//Num=0;
	//	Vme=new vector(16);
	//	Vme_chunking=new vector(16);
	// TODO Auto-generated constructor stub
}
// Serialize this single relation as the root element of a fresh XML
// document and write the document to the given file path.
void Relation::write_to_XML(string path)
{
	pugi::xml_document document;
	pugi::xml_node node = document.append_child();
	get_XML(&node);
	document.save_file(path.c_str());
}
// Serialize a whole collection of relations under one <relations> root
// element and write the resulting document to `path`.
void Relation::write_to_XML(string path,vector<Relation> relations)
{
	pugi::xml_document document;
	pugi::xml_node relationsNode = document.append_child("relations");
	for (size_t idx = 0; idx < relations.size(); ++idx)
	{
		pugi::xml_node child = relationsNode.append_child();
		relations[idx].get_XML(&child);
	}
	document.save_file(path.c_str());
}
// Populate *root with this relation's XML representation:
//   <relation type="...">
//     <sentence>token token ... </sentence>
//     <clue>...</clue>
//     <entities>
//       <entity type=".." begin=".." end=".."/>   (entity 1)
//       <entity type=".." begin=".." end=".."/>   (entity 2)
//     </entities>
//   </relation>
void Relation::get_XML(pugi::xml_node* root)
{
	root->set_name("relation");
	root->append_attribute("type").set_value(relation_type.c_str());

	// Rebuild the raw sentence text: token strings joined by spaces
	// (with a trailing space, matching the original output format).
	string joined;
	for (size_t tok = 0; tok < my_sentences->size(); ++tok)
	{
		joined += my_sentences->at(tok).str;
		joined += " ";
	}
	root->append_child("sentence").append_child(pugi::node_pcdata).set_value(joined.c_str());
	root->append_child("clue").append_child(pugi::node_pcdata).set_value(clue.c_str());

	pugi::xml_node entities = root->append_child("entities");

	pugi::xml_node first = entities.append_child("entity");
	first.append_attribute("type").set_value(name_entity_type_1.c_str());
	first.append_attribute("begin").set_value(begin_entity1);
	first.append_attribute("end").set_value(end_entity1);

	pugi::xml_node second = entities.append_child("entity");
	second.append_attribute("type").set_value(name_entity_type_2.c_str());
	second.append_attribute("begin").set_value(begin_entity2);
	second.append_attribute("end").set_value(end_entity2);
}

extern void tokenize(const string & s1, list<string> & lt);
// Parse `path` as a <relations> XML document (the format produced by
// write_to_XML/get_XML) and reconstruct one Relation per <relation>
// element.  The sentence text is re-tokenized into Tokens with empty
// POS tags.
//
// Returns the parsed relations; an empty vector if the file is missing
// or has no <relations> root.
//
// NOTE(review): each Relation's Sentence is heap-allocated and never
// freed -- ownership handling matches the rest of this codebase.
vector<Relation> Relation::readFromXML(string path)
{
	pugi::xml_document doc;
	doc.load_file(path.c_str());
	pugi::xml_node root=doc.child("relations");
	vector<Relation> relations;
	for(pugi::xml_node relation=root.child("relation");relation;relation=relation.next_sibling("relation"))
	{
		// Build the relation on the stack; it is copied into the result
		// vector anyway (the old new/delete pair was pure overhead).
		Relation curr;
		curr.relation_type=relation.attribute("type").value();
		string rawsentence=relation.child("sentence").text().get();
		list<string> result;
		tokenize(rawsentence,result);
		curr.my_sentences=new Sentence();
		for(list<string>::iterator it=result.begin();it!=result.end();++it)
		{
			curr.my_sentences->push_back(Token(*it,""));
		}
		// BUG FIX: the clue used to be assigned to *this* object's `clue`
		// member (plain `clue=`) instead of the relation being built, so
		// the parsed clue was silently dropped.
		curr.clue=relation.child("clue").text().as_string();

		// First <entity> child: entity 1.
		pugi::xml_node entity=relation.child("entities").first_child();
		curr.name_entity_type_1=entity.attribute("type").as_string();
		curr.begin_entity1=entity.attribute("begin").as_int();
		curr.end_entity1=entity.attribute("end").as_int();

		// Second <entity> child: entity 2.
		entity=entity.next_sibling();
		curr.name_entity_type_2=entity.attribute("type").as_string();
		curr.begin_entity2=entity.attribute("begin").as_int();
		curr.end_entity2=entity.attribute("end").as_int();

		relations.push_back(curr);
	}
	return relations;
}
extern void NLP_Processing(Sentence* sen, const vector<ME_Model> & vme, const vector<ME_Model> & chunking_vme,const ME_Model & NE);
extern void stemSentence(Sentence *s);
extern void init_morphdic();
// Run `mes` through the model and store the winning class label in
// relation_type.  The "O" (no relation) class score is handicapped by
// 0.6 so borderline cases are biased toward reporting a relation.
// NOTE(review): ME_Model is taken by value (copies the model on every
// call); consider const& if ME_Model's API allows it -- confirm.
void Relation::classifier(ME_Model ME_,ME_Sample mes)
{
	vector<double> scores = ME_.classify(mes);
	const int otherClass = ME_.get_class_id("O");
	scores[otherClass] -= 0.6;

	// Arg-max over the adjusted scores.
	int best = 0;
	for (int cls = 1; cls < ME_.num_classes(); ++cls)
	{
		if (scores[cls] > scores[best])
			best = cls;
	}
	relation_type = ME_.get_class_label(best);
}

// Dump `relations` to `filename` as a pseudo-HTML report (XML-style
// tags inside an <HTML> wrapper).  The file is opened in append mode,
// so repeated calls accumulate reports in the same file.
void Relation::writetoHTML(vector<Relation>relations, string filename)
{
	string out = "<HTML>\n<Relations>\n";
	for (size_t r = 0; r < relations.size(); ++r)
	{
		Relation& rel = relations[r];
		out += "<Relation>\n";
		out += "<type>" + rel.relation_type + "</type>\n";

		// Full sentence, tokens separated (and terminated) by spaces.
		out += "<sentence>";
		for (int w = 0; w < (int)rel.my_sentences->size(); ++w)
			out += rel.my_sentences->at(w).str + " ";
		out += "</sentence>\n";

		// Tokens of the first entity span [begin_entity1, end_entity1).
		out += "<entity1>";
		for (int w = rel.begin_entity1; w < rel.end_entity1; ++w)
			out += rel.my_sentences->at(w).str + " ";
		out += "</entity1>\n";

		// Tokens of the second entity span [begin_entity2, end_entity2).
		out += "<entity2>";
		for (int w = rel.begin_entity2; w < rel.end_entity2; ++w)
			out += rel.my_sentences->at(w).str + " ";
		out += "</entity2>\n";

		out += "</Relation>\n";
	}
	out += "</Relations>\n</HTML>";

	ofstream File(filename.c_str(), ios::out | ios::app);
	File << out;
}
// Evaluate the two-stage relation extractor against gold relations.
//
// Stage 1 ("Found_OR_Not" model): binary detection -- does the entity
// pair hold any relation at all?  Overall precision/recall printed.
// Stage 2 ("Classifier" model): for pairs detected as ContainRelation,
// classify the relation type; per-type precision/recall printed.
//
// ORiginal              gold-labelled relations ("O" == no relation)
// vme/vme_chunking/NE   unused here; kept for interface compatibility
void Relation::Evaluate(vector<Relation> ORiginal,vector<ME_Model> vme,vector<ME_Model> vme_chunking,ME_Model NE)
{
	int HitsCounter=0;          // never incremented; printed for legacy output format
	int NumberOfTrueRelations=0;
	int TruePositive=0;
	int FalsePositive=0;
	// (removed: TrueNegative and an UNINITIALIZED FalseNegative, both unused)

	// Per-relation-type counters, keyed by gold relation type.
	map<string,int>M_Truepositive;
	map<string,int>M_Falsepositive;
	map<string,int>M_NumberOfCorrect;   // gold occurrence count per type

	// Count the gold relations per type.
	for(size_t i=0;i<ORiginal.size();i++)
	{
		if(ORiginal.at(i).relation_type != "O")
		{
			NumberOfTrueRelations++;
			map<string,int>::iterator it=M_NumberOfCorrect.find(ORiginal.at(i).relation_type);
			if(it==M_NumberOfCorrect.end())
			{
				M_NumberOfCorrect.insert(pair<string,int>(ORiginal.at(i).relation_type,1));
				M_Falsepositive.insert(pair<string,int>(ORiginal.at(i).relation_type,0));
				M_Truepositive.insert(pair<string,int>(ORiginal.at(i).relation_type,0));
			}
			else
			{
				it->second++;
			}
		}
	}

	double precesion;
	double Recall;
	ME_Model Found_or_NOT;
	ME_Model CheckRelation;
	CheckRelation.load_from_file("models_named_entity/Classifier");//to be set later
	Found_or_NOT.load_from_file("models_named_entity/Found_OR_Not");

	// Work on a copy so the gold labels in ORiginal stay untouched.
	vector<Relation> CopiedVector(ORiginal.begin(),ORiginal.end());

	// Stage 1: binary detection.
	for(size_t i=0;i<CopiedVector.size();i++)
	{
		CopiedVector.at(i).classifier(Found_or_NOT,CopiedVector.at(i).FeatureExtractor("?"));
		cout<<i<<endl;
		if(CopiedVector.at(i).relation_type == "ContainRelation")
		{
			if(ORiginal.at(i).relation_type != "O")
				TruePositive++;
			else
				FalsePositive++;
		}
	}
	cout<<TruePositive<<"\t"<<HitsCounter<<endl;
	precesion=((double)TruePositive / ((double) TruePositive+FalsePositive))*100.0;
	Recall=((double)TruePositive / ((double)(NumberOfTrueRelations))) * 100.0;
	cout<<"Precesion : "<<precesion<<endl;
	cout<<"Recall : "<<Recall<<endl;

	// Stage 2: type classification on the pairs detected above.
	for(size_t i=0;i<CopiedVector.size();i++)
	{
		if(CopiedVector.at(i).relation_type == "ContainRelation")
		{
			cout<<i<<endl;
			CopiedVector.at(i).classifier(CheckRelation,CopiedVector.at(i).FeatureExtractor("?"));
			// BUG FIX: the predicted type may be absent from the gold-built
			// maps; dereferencing map.end() was undefined behaviour.  Only
			// count predictions whose type was seen in the gold data.
			if(ORiginal.at(i).relation_type.compare(CopiedVector.at(i).relation_type) == 0)
			{
				map<string,int>::iterator it=M_Truepositive.find(CopiedVector.at(i).relation_type);
				if(it!=M_Truepositive.end())
					it->second++;
			}
			else
			{
				map<string,int>::iterator it=M_Falsepositive.find(CopiedVector.at(i).relation_type);
				if(it!=M_Falsepositive.end())
					it->second++;
			}
		}
	}

	// Per-type precision/recall report.
	for(map<string,int>::iterator it=M_Truepositive.begin();it!=M_Truepositive.end();it++)
	{
		cout<<"Relation : "<<it->first<<endl;
		double tp=(double)it->second;
		double fp=(double)M_Falsepositive.find(it->first)->second;
		double correct=(double)M_NumberOfCorrect.find(it->first)->second;
		precesion= (tp/(tp+fp))*100.0;
		Recall= (tp/ correct) * 100.0;
		cout<<"Precesion : "<<precesion<<endl;
		cout<<"Recall : "<<Recall<<endl;
	}
}
// Build the maximum-entropy feature vector for this relation candidate.
//
// `label` becomes the sample's class label ("?" at prediction time, the
// gold relation type at training time).
//
// SIDE EFFECT: if entity 1 starts after entity 2, the begin/end indices
// are swapped IN PLACE on this object so entity 1 is always the
// leftmost span -- the swap persists for the caller.
//
// Feature groups emitted (string-prefixed features):
//   - entity types (separately and concatenated) and entity head words
//   - every word, the whole concatenation, and word bigrams between spans
//   - +/-2-token context windows around each entity (plain and stemmed)
//   - token distance and named-entity count between the spans
//   - stemmed variants of the between-span word/whole/bigram features
//
// NOTE(review): any change to the emitted feature strings invalidates
// the trained model files on disk -- do not alter formats casually.
ME_Sample Relation::FeatureExtractor(string label)
{
	vector<string> FirstEntity;
	vector<string> SecondEntity;
	ME_Sample MES;
	MES.label=label;
	int temp;
	// Normalise ordering: make entity 1 the one occurring first.
	if(begin_entity1 > begin_entity2)
	{
		temp = begin_entity1;
		begin_entity1 = begin_entity2;
		begin_entity2 = temp;
		temp=0;
		temp=end_entity1;
		end_entity1=end_entity2;
		end_entity2=temp;
	}
	//cout<<"Feature Exractor Partition 1"<<endl;
	//cout<<name_entity_type_1<<" "<<name_entity_type_2<<" "<<begin_entity1<<" "<<begin_entity2<<" "<<end_entity1<<" "<<end_entity2<<endl;
	// Collect the tokens of each entity span [begin, end).
	for(int i=begin_entity1;i<end_entity1;i++)
		FirstEntity.push_back(my_sentences->at(i).str);
	for(int i=begin_entity2;i<end_entity2;i++)
		SecondEntity.push_back(my_sentences->at(i).str);
	//push entity based Features
	MES.features.push_back("NE_TYPE_"+name_entity_type_1);
	//cout<<"NE_TYPE_"+name_entity_type_1<<endl;
	MES.features.push_back("NE_TYPE_"+name_entity_type_2);
	//cout<<"NE_TYPE_"+name_entity_type_2<<endl;
	MES.features.push_back("NE_TYPE_CONCAT_"+name_entity_type_1+name_entity_type_2);
	//cout<<"NE_TYPE_CONCAT_"+name_entity_type_1+name_entity_type_2<<endl;
	// NOTE(review): at(0) throws if an entity span is empty
	// (begin == end) -- presumably spans are always non-empty; confirm.
	MES.features.push_back("HEAD_"+FirstEntity.at(0));
	//cout<<"HEAD_"+FirstEntity.at(0)<<endl;
	MES.features.push_back("HEAD_"+SecondEntity.at(0));
	//cout<<"HEAD_"+SecondEntity.at(0)<<endl;
	//push word based features
	//	cout<<"HERE"<<endl;
	// One feature per word strictly between the two entity spans.
	//cout<<"Feature Exractor Partition 2"<<endl;
	for(int i=end_entity1;i<begin_entity2;i++)
	{
		MES.features.push_back("Word_"+my_sentences->at(i).str);
		//cout<<"Word_"+my_sentences->at(i).str<<endl;
	}
	// The whole between-span text concatenated as a single feature.
	string whole = "whole_";
	for(int i=end_entity1;i<begin_entity2;i++)
	{
		whole+=my_sentences->at(i).str;
	}
	MES.features.push_back(whole);
	//cout<<whole<<endl;
	// Word bigrams of the between-span text.
	string Bigram = "Bigram_";
	for(int i=end_entity1;i<begin_entity2;i++)
	{
		if(i == begin_entity2-1)
			break;
		Bigram+=(my_sentences->at(i).str+my_sentences->at(i+1).str);
		MES.features.push_back(Bigram);
		//NumberOfFeatures++;
		//	cout<<Bigram<<endl;
		Bigram="Bigram_";
	}
	//cout<<"Feature Exractor Partition 3"<<endl;
	// Context windows: two tokens before/after each entity span.
	int Entity1_WindowBegin = begin_entity1-2;
	int Entity1_WindowEnd = end_entity1+2;
	int Entity2_WindowBegin = begin_entity2 - 2;
	int Entity2_WindowEnd = end_entity2 + 2;
	// NOTE(review): out-of-range window indices are handled by letting
	// at() throw; a negative/past-the-end index therefore aborts ALL
	// remaining window loops in this try block, not just the offending
	// one.  The trained models were built with this quirk, so it is
	// deliberately left unchanged.
	try{
		for(int i=Entity1_WindowBegin;i<begin_entity1;i++)
		{
			MES.features.push_back("BEFORE_2_"+my_sentences->at(i).str);
			//NumberOfFeatures++;
			//cout<<"BEFORE_"+my_sentences->at(i).str<<endl;
		}
		for(int i=end_entity1;i<Entity1_WindowEnd;i++)
		{
			MES.features.push_back("AFTER_2_"+my_sentences->at(i).str);
			//NumberOfFeatures++;
			//cout<<"AFTER_"+my_sentences->at(i).str<<endl;
		}
		for(int i=Entity2_WindowBegin;i<begin_entity2;i++)
		{
			MES.features.push_back("BEFORE_2_"+my_sentences->at(i).str);
			//NumberOfFeatures++;
			//cout<<"BEFORE_"+my_sentences->at(i).str<<endl;
		}
		for(int i=end_entity2;i<Entity2_WindowEnd;i++)
		{
			MES.features.push_back("AFTER_2_"+my_sentences->at(i).str);
			//NumberOfFeatures++;
			//cout<<"AFTER_"+my_sentences->at(i).str<<endl;
		}
	}catch(exception &e){}
	//cout<<"Feature Exractor Partition 4"<<endl;
	// Token distance between the spans (end of 1st to start of 2nd).
	int Distance=0;
	for(int i=end_entity1;i<begin_entity2;i++)
	{
		Distance++;
	}
	stringstream s;
	s << Distance;
	MES.features.push_back("Distance_"+s.str());
	// Copy the sentence to inspect NE tags between the spans.
	Sentence my_n_sentences;
	my_n_sentences.clear();
	for(int i=0;i<my_sentences->size();i++)
	{
		my_n_sentences.push_back(my_sentences->at(i));
	}
	// Count named entities ("O" == no entity) between the spans.
	int NumEntities=0;
	for(int i=end_entity1;i<begin_entity2;i++)
	{
		if(my_n_sentences.at(i).ne.compare("O") != 0)
			NumEntities++;
	}
	stringstream f;
	f<<NumEntities;
	MES.features.push_back("#ENTITIES_"+f.str());
	//NumberOfFeatures++;
	//cout<<"#ENTITIES_"+f.str()<<endl;
	// Stemmed variants: copy the sentence and stem the copy so the
	// original tokens stay untouched.
	//cout<<"Feature Exractor Partition 5"<<endl;
	Sentence n_my_sentences;
	n_my_sentences.clear();
	for(int i=0;i<my_sentences->size();i++)
	{
		n_my_sentences.push_back(my_sentences->at(i));
	}
	stemSentence(&n_my_sentences);
	// Stemmed between-span words.
	for(int i=end_entity1;i<begin_entity2;i++)
	{
		MES.features.push_back("Stem_Word_"+n_my_sentences.at(i).str);
		//NumberOfFeatures++;
		//cout<<"Stem_Word_"+my_sentences->at(i).str<<endl;
	}
	// Stemmed between-span concatenation.
	string Swhole = "Stem_whole_";
	for(int i=end_entity1;i<begin_entity2;i++)
		Swhole+=n_my_sentences.at(i).str;
	MES.features.push_back(Swhole);
	//NumberOfFeatures++;
	//cout<<Swhole<<endl;
	// Stemmed between-span bigrams.
	string SBigram = "Stem_Bigram_";
	for(int i=end_entity1;i<begin_entity2;i++)
	{
		if(i == begin_entity2-1)
			break;
		SBigram+=(n_my_sentences.at(i).str+n_my_sentences.at(i+1).str);
		MES.features.push_back(SBigram);
		//NumberOfFeatures++;
		//cout<<SBigram<<endl;
		SBigram="Stem_Bigram_";
	}
	//cout<<"Feature Exractor Partition 6"<<endl;
	// Stemmed context windows (same exception quirk as above).
	int SEntity1_WindowBegin = begin_entity1-2;
	int SEntity1_WindowEnd = end_entity1+2;
	int SEntity2_WindowBegin = begin_entity2-2;
	int SEntity2_WindowEnd = end_entity2+2;
	try{
		for(int i=SEntity1_WindowBegin;i<begin_entity1;i++)
		{
			MES.features.push_back("Stem_BEFORE_2_"+n_my_sentences.at(i).str);
			//NumberOfFeatures++;
			//cout<<"Stem_BEFORE_"+my_sentences->at(i).str<<endl;
		}
		for(int i=end_entity1;i<SEntity1_WindowEnd;i++)
		{
			MES.features.push_back("Stem_AFTER_2_"+n_my_sentences.at(i).str);
			//cout<<"Stem_AFTER_"+my_sentences->at(i).str<<endl;
		}
		for(int i=SEntity2_WindowBegin;i<begin_entity2;i++)
		{
			MES.features.push_back("Stem_BEFORE_2_"+n_my_sentences.at(i).str);
			//cout<<"Stem_BEFORE_"+my_sentences->at(i).str<<endl;
		}
		for(int i=end_entity2;i<SEntity2_WindowEnd;i++)
		{
			MES.features.push_back("Stem_AFTER_2_"+n_my_sentences.at(i).str);
			//cout<<"Stem_AFTER_"+my_sentences->at(i).str<<endl;
		}
	}catch(exception &e){}
	//cout<<"Feature Exractor Partition 7"<<endl;
	//cout<<"Feature Extractor"<<endl;
	//cerr<<"Finished "<<endl;
	return MES;
}
// Run the full NLP pipeline on this relation's sentence in place:
// bidirectional-beam POS tagging, chunking, then named-entity tagging.
void Relation::NLPProcessing(const vector<ME_Model> & vme, const vector<ME_Model> & chunking_vme,const ME_Model & NE)
{
	const multimap<string, string> dummy;   // no tag dictionary used
	bidir_decode_beam(*my_sentences, dummy, vme);
	// The decoder writes its prediction into `prd`; promote it to `pos`
	// so the downstream stages see the tagged tokens.
	for (size_t tok = 0; tok < my_sentences->size(); ++tok)
		my_sentences->at(tok).pos = my_sentences->at(tok).prd;
	bidir_chuning_decode_beam(*my_sentences, chunking_vme);
	find_NEs(NE, *my_sentences);
}

// Train the two relation-extraction models from labelled data:
//  1. <OutPath>Classify   -- multi-class model over real relation types
//                            (samples whose label is not "O").
//  2. <OutPath>Classifier -- binary detector: every positive sample is
//                            relabelled "ContainRelation" and mixed with
//                            the "O" samples.
void Relation::TrainRelations(string OutPath,vector<Relation> RELATIONS,const vector<ME_Model> & vme, const vector<ME_Model> & chunking_vme,const ME_Model & NE)
{
	ME_Sample M;
	int count=0;
	vector<ME_Sample> Relation_trainvector;  // samples with a real relation label
	vector<ME_Sample> Other_trainvector;     // "O" samples (+ relabelled positives later)
	cout<<"Extracting Features.."<<endl;
	for(size_t i=0;i<RELATIONS.size();i++)
	{
		// Tag/chunk/NE the sentence first; feature extraction depends on it.
		RELATIONS[i].NLPProcessing(vme,chunking_vme,NE);
		M=RELATIONS[i].FeatureExtractor(RELATIONS[i].relation_type);
		if(!M.features.empty())
		{
			if(M.label.compare("O")!=0)
				Relation_trainvector.push_back(M);
			else
				Other_trainvector.push_back(M);
		}
		count++;
		if(count%10 == 0)
			cout<<count<<" Finished"<<endl;
	}
	cout<<"End Feature Extraction.."<<endl;

	// Model 1: relation-type classifier over the positive samples only.
	cout<<"Start Training classification.."<<endl;
	ME_Model m;
	m.use_l1_regularizer(1.0);
	m.set_heldout(1000);
	m.train(Relation_trainvector);
	m.save_to_file(OutPath+"Classify");

	// Relabel every positive sample and add it to the "O" pool so the
	// second model learns ContainRelation vs O.
	for(size_t i=0;i<Relation_trainvector.size();i++)
	{
		ME_Sample sample=Relation_trainvector[i];
		sample.label="ContainRelation";
		Other_trainvector.push_back(sample);
	}
	cout<<"Start Training.."<<endl;
	// BUG FIX: this second stage used to call train(Relation_trainvector)
	// again, leaving Other_trainvector (built just above) entirely unused
	// -- and it reused the already-trained model `m`.  Train the binary
	// detector on the combined pool with a fresh model instead.
	ME_Model detector;
	detector.use_l1_regularizer(1.0);
	detector.set_heldout(1000);
	detector.train(Other_trainvector);
	detector.save_to_file(OutPath+"Classifier");
}
//void Relation::EvaluateSystem(vector<ME_Model> vme,vector<ME_Model> vme_chunking,ME_Model NE,vector<Relation> TestData)
//{
//
//	ME_Model Found_or_NOT;
//	ME_Model CheckRelation;
//	CheckRelation.load_from_file("models_named_entity/Classifier");//to be set later
//	Found_or_NOT.load_from_file("models_named_entity/Found_OR_Not");
//
//	int TruePositive=0;
//	int FalsePositive=0;
//	//extract Sentence
//	vector<Sentence*>Sentences;
//	for(int i=0;i<TestData.size();i++)
//		Sentences.push_back(TestData.at(i).my_sentences);
//	vector<Token> NEWNAMEDEnTITIES;
//	for(int i=0;i<Sentences.size();i++)
//	{
//		NEWNAMEDEnTITIES.clear();
//		NLP_Processing(Sentences.at(i),vme,vme_chunking,NE);
//		// ????????????????????????????? if Name entitiy takes more than one word
//		for(int j=0;j<Sentences.at(i)->size();j++)
//		{
//			if(Sentences.at(i)->at(j).ne.compare("") != 0)
//			{
//				NEWNAMEDEnTITIES.push_back(Sentences.at(i)->at(j));
//			}
//		}
//		// get all combinations
//		for(int j=0;j<NEWNAMEDEnTITIES.size();j++)
//		{
//			for(int k=j+1;k<NEWNAMEDEnTITIES.size();k++)
//			{
//				Relation *Temp=new Relation();
//				Temp->name_entity_type_1=NEWNAMEDEnTITIES.at(j);
//				Temp->name_entity_type_2=NEWNAMEDEnTITIES.at(k);
//				if(BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(j).str) >= 0)
//					Temp->begin_entity1=BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(j).str);
//				if(BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(k).str) >= 0)
//					Temp->begin_entity2=BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(k).str);
//				if(END(NEWNAMEDEnTITIES.at(j).str) !=0)
//					Temp->end_entity1=END(NEWNAMEDEnTITIES.at(j).str);
//				if(END(NEWNAMEDEnTITIES.at(j).str) !=0)
//					Temp->end_entity2=END(NEWNAMEDEnTITIES.at(k).str);
//				Temp->relation_type="?";
//				Temp->classifier(Found_or_NOT,Temp->FeatureExtractor(vme,vme_chunking,NE,false));
//				// ???????????? we should search in all relations of this sentence , one way is to make a map with
//				// it's key is the sentence and it's value is a vector of relations containing this sentence
//				// Given that expert may type the sentence more than once in order to put more than one relation
//				if(Temp->relation_type == "ContainRelation")
//				{
//					if(TestData.at(i).relation_type != "O")
//						TruePositive++;
//					else
//						FalsePositive++;
//				}
//				delete Temp;
//			}
//		}
//	}
//	int NumberOfTrueRelations=0;
//	for(int i=0;i<TestData.size();i++)
//	{
//		if(TestData.at(i).relation_type.compare("O") != 0)
//			NumberOfTrueRelations++;
//	}
//	cout<<"Precesion : "<<((double)TruePositive / ((double) TruePositive+FalsePositive))*100.0<<endl;
//	cout<<"Recall : "<<((double)TruePositive / ((double)(NumberOfTrueRelations))) * 100.0<<endl;
//	// Types of relations
//	for(int i=0;i<Sentences.size();i++)
//	{
//		NEWNAMEDEnTITIES.clear();
//		NLP_Processing(Sentences.at(i),vme,vme_chunking,NE);
//		for(int j=0;j<Sentences.at(i)->size();j++)
//		{
//			if(Sentences.at(i)->at(j).ne.compare("") != 0)
//			{
//				NEWNAMEDEnTITIES.push_back(Sentences.at(i)->at(j));
//			}
//		}
//		// get all combinations
//		for(int j=0;j<NEWNAMEDEnTITIES.size();j++)
//		{
//			for(int k=j+1;k<NEWNAMEDEnTITIES.size();k++)
//			{
//				Relation *Temp=new Relation();
//				Temp->name_entity_type_1=NEWNAMEDEnTITIES.at(j);
//				Temp->name_entity_type_2=NEWNAMEDEnTITIES.at(k);
//				if(BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(j).str) >= 0)
//					Temp->begin_entity1=BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(j).str);
//				if(BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(k).str) >= 0)
//					Temp->begin_entity2=BEGIN(*Sentences.at(i),NEWNAMEDEnTITIES.at(k).str);
//				if(END(NEWNAMEDEnTITIES.at(j).str) !=0)
//					Temp->end_entity1=END(NEWNAMEDEnTITIES.at(j).str);
//				if(END(NEWNAMEDEnTITIES.at(j).str) !=0)
//					Temp->end_entity2=END(NEWNAMEDEnTITIES.at(k).str);
//				Temp->relation_type="?";
//				Temp->classifier(CheckRelation,Temp->FeatureExtractor(vme,vme_chunking,NE,false));
//
//				//
//				//				if(Temp->relation_type == "ContainRelation")
//				//				{
//				//					if(TestData.at(i).relation_type != "O")
//				//						TruePositive++;
//				//					else
//				//						FalsePositive++;
//				//				}
//				delete Temp;
//			}
//		}
//	}
//	cout<<"Precesion : "<<((double)TruePositive / ((double) TruePositive+FalsePositive))*100.0<<endl;
//	cout<<"Recall : "<<((double)TruePositive / ((double)(NumberOfTrueRelations))) * 100.0<<endl;
//}
// Scan a tagged sentence for named-entity spans and classify every
// ordered pair of distinct entities.
//
// Returns only the pairs the binary Found_or_NOT model flags as
// ContainRelation, each with its specific type assigned by the
// CheckRelation model.
vector<Relation> Relation::FindRelations(Sentence s,ME_Model CheckRelation,ME_Model Found_or_NOT)
{
	// BUG FIX: the returned relations used to store `&s` -- a pointer to
	// this by-value parameter, which dangles the moment this function
	// returns.  Keep one heap copy shared by all returned relations
	// instead.  (It is intentionally never freed, matching the sentence
	// ownership style used elsewhere in this file.)
	Sentence* shared_sentence = new Sentence(s);

	// Collect entity spans: Begins[k] is the first token of span k,
	// Ends[k] is one past its last token.
	// BUG FIX: initialise t; it was read uninitialised below when the
	// sentence is empty.
	int t = -1;
	vector<int>Begins;
	vector<int>Ends;
	for(int i=0;i<(int)s.size();i++)
	{
		if(s.at(i).ne.compare("") !=0 && s.at(i).ne.compare("O") !=0)
		{
			Begins.push_back(i);
			// Strip the 2-char chunk prefix (e.g. "B-") to get the type.
			string ne = s.at(i).ne;
			ne=ne.substr(2,(ne.size()-2));
			for(int j=i+1;j<(int)s.size();j++)
			{
				string currne=s.at(j).ne;
				if(currne.compare("O")!=0)
					currne=currne.substr(2,(currne.size()-2));
				if(currne.compare(ne) == 0)
				{
					i++;   // token continues the same entity span
				}else{
					Ends.push_back(j);
					break;
				}
			}
		}
		t=i;
	}
	Ends.push_back(t+1);  // close a span running to the end of the sentence

	vector<Relation> relations;
	int count=1;
	// Every ordered pair (i, j), i != j, of detected entity spans.
	for(size_t i=0;i<Begins.size();i++)
	{
		for(size_t j=0;j<Begins.size();j++)
		{
			if(i==j)
				continue;
			Relation Temp;
			Temp.my_sentences=shared_sentence;
			Temp.name_entity_type_1=s.at(Begins.at(i)).ne;
			Temp.name_entity_type_2=s.at(Begins.at(j)).ne;
			Temp.begin_entity1=Begins.at(i);
			Temp.begin_entity2=Begins.at(j);
			Temp.end_entity1=Ends.at(i);
			Temp.end_entity2=Ends.at(j);
			Temp.relation_type="?";
			if(Temp.name_entity_type_1.compare("O") != 0 && Temp.name_entity_type_2.compare("O") != 0)
			{
				// Stage 1: is there any relation at all?
				ME_Sample Sample=Temp.FeatureExtractor("?");
				Temp.classifier(Found_or_NOT,Sample);
				if(Temp.relation_type == "ContainRelation")
				{
					// Stage 2: which relation type?
					Temp.classifier(CheckRelation,Sample);
					relations.push_back(Temp);
					count++;
				}
			}
		}
	}
	return relations;
}


// Destructor.
// NOTE(review): my_sentences is deliberately not deleted here --
// Relation objects are copied freely (the pointer is shared between
// copies), so freeing it would cause double-deletes.  The Sentence
// allocations therefore leak by design in this codebase.
Relation::~Relation() {
	// TODO Auto-generated destructor stub
}
