package naive2;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;

import org.json.JSONException;
import org.json.JSONObject;

import tools.DocRetriever;
import tools.LuceneNameRetriever;
import tools.QueryParser;
import tools.ResultOutput;
import tools.TfIdfCalculator;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import entities.Doc;
import entities.Entity;
import entities.Query;
import entities.Result;

//Uses naive bag-of-words methods.
// There should be a normalization across all the entities in the same query: if they all share a word,
//that word should become less important.
// Use tf-idf per query, i.e. calculate tf-idf over all candidate documents retrieved for a query.
public class GloabalTfIdfLinking {

	// Hard-coded cluster paths, hoisted so they appear exactly once.
	private static final String DEFAULT_QUERY_PATH =
			"/largedata1/cmput696/students/lyao1/data/dev.xml";
	private static final String DEFAULT_RESULT_PATH =
			"/largedata1/cmput696/students/lyao1/data/result.xml";
	private static final String NAME_INDEX_PATH =
			"/largedata1/cmput696/luceneIndex/lucene-a2e";
	private static final String LEMMA_INDEX_PATH =
			"/largedata1/cmput696/students/lyao1/lemmaIndex";
	private static final String IDF_COUNT_PATH =
			"/largedata1/cmput696/students/lyao1/idfcount.txt";

	/** CoreNLP pipeline used for tokenization/lemmatization of docs and entities. */
	private StanfordCoreNLP pipeline;
	/** Parses the query XML file. */
	private QueryParser qp;
	/** Retrieves candidate entities by surface name from the Lucene name index. */
	private LuceneNameRetriever lr;
	/** Retrieves entity JSON records from the lemma index. */
	private LuceneJsonRetriever lj;
	/** Fetches the source document referenced by a query. */
	private DocRetriever dr;
	/** Converts a document into a bag of words. */
	private Doc2BoW d2b;
	/** Converts an entity's JSON record into a bag of words (stopword-filtered variant). */
	private Json2BoWStop1 j2b;
	/** Pre-computed global idf counts loaded from disk. */
	private BoW gloablaIdf;
	/** Total number of documents in the collection (|D| for idf). */
	private int D;
	/** Output path for results; null means use DEFAULT_RESULT_PATH. */
	private String resultDir = null;

	/**
	 * Builds a linker over the default dev query set, writing results to the
	 * default result path.
	 */
	public GloabalTfIdfLinking() {
		// Delegate to the main constructor instead of duplicating its body.
		this(DEFAULT_QUERY_PATH, null);
	}

	/**
	 * Builds a linker.
	 *
	 * @param queryDir  path to the query XML file
	 * @param resultDir path for the result XML; null falls back to the default
	 */
	public GloabalTfIdfLinking(String queryDir, String resultDir) {
		this.resultDir = resultDir;

		Properties props = new Properties();
		props.put("annotators", "tokenize, ssplit, pos, lemma");
		this.pipeline = new StanfordCoreNLP(props);

		this.qp = new QueryParser(queryDir);
		this.lr = new LuceneNameRetriever(NAME_INDEX_PATH);
		this.lj = new LuceneJsonRetriever(LEMMA_INDEX_PATH);
		this.dr = new DocRetriever();
		this.d2b = new Doc2BoW(this.pipeline);

		// Warm-up query so the reader populates name2id, which we need below
		// to obtain the collection size D. (Presumably the map is lazily
		// loaded on first query — TODO confirm against LuceneJsonRetriever.)
		this.lj.getReader().queryEntityJSONIndex("Michael Jordan");
		this.D = this.lj.getReader().name2id.keySet().size();
		System.out.println("D:" + this.D);

		this.j2b = new Json2BoWStop1(this.pipeline);

		this.gloablaIdf = TfIdfCalculator.idfCount(IDF_COUNT_PATH);
	}

	/**
	 * Processes every query in the query file, scores its candidate entities,
	 * and writes the top-30 results per query to the result XML.
	 *
	 * @throws Exception propagated from query processing or result output
	 */
	public void run() throws Exception {
		List<Query> qs = this.qp.getQueries();

		int size = qs.size();
		int count = 0;
		ArrayList<Result> rlist = new ArrayList<Result>();
		for (Query query : qs) {
			count++;
			Result r = this.processOnequery(query);
			rlist.add(r);
			System.out.println("current:" + count + "/" + size);
		}

		String outPath = (this.resultDir != null) ? this.resultDir : DEFAULT_RESULT_PATH;
		ResultOutput rop = new ResultOutput(outPath, this.qp.getQueryPath(), rlist);
		rop.setNT(30);
		rop.output_nt();
		// To output all candidates instead of the top NT, use rop.output().
	}

	/**
	 * Scores all candidate entities for a single query.
	 *
	 * @param query the query (name mention plus source document id)
	 * @return the query paired with its scored candidate entities
	 * @throws Exception propagated from document retrieval or scoring
	 */
	public Result processOnequery(Query query) throws Exception {

		// Retrieve the source document referenced by the query.
		Doc doc = this.dr.getDocById(query.getDocid());

		System.out.println("start query:" + query.getName());
		BoW doc_bow = this.d2b.doc2bow(doc);

		// Retrieve all candidate entities whose name matches the mention.
		ArrayList<Entity> es = this.lr.getEntities(query.getName());

		for (Entity e : es) {
			this.scoreAEntity(e, doc_bow);
			// Alternative scorer that boosts title-word matches:
			// this.scoreAEntity_title(e, doc_bow);
		}

		Result result = new Result(query, es);

		System.out.println("finish query:" + query.getName());
		return result;
	}

	/**
	 * Scores one candidate entity against the query document's bag of words
	 * using the global log-tf / enhanced-idf measure, and stores the score on
	 * the entity.
	 *
	 * @param e       candidate entity to score (mutated: score is set)
	 * @param doc_bow bag of words of the query's source document
	 * @throws JSONException if the entity's JSON record cannot be parsed
	 */
	public void scoreAEntity(Entity e, BoW doc_bow) throws JSONException {

		JsonEntity je = JsonEntity.instance(e, this.lj);
		BoW ebow = this.j2b.jentity2bow(je);

		System.out.println("***************************************************Start score entity:" + e.getName());
		float score = doc_bow.global_logtf_enhace_idf(ebow, this.D, this.gloablaIdf);
		System.out.println("**********************************Finish score entity:" + e.getName() + " score:" + score);
		e.setScore(score);
	}

	/**
	 * Like {@link #scoreAEntity}, but additionally adds a fixed bonus of 50
	 * for each non-stopword title word of the entity that also occurs in the
	 * document.
	 *
	 * @param e       candidate entity to score (mutated: score is set)
	 * @param doc_bow bag of words of the query's source document
	 * @throws JSONException if the entity's JSON record cannot be parsed
	 */
	public void scoreAEntity_title(Entity e, BoW doc_bow) throws JSONException {
		JsonEntity je = JsonEntity.instance(e, this.lj);
		BoW ebow = this.j2b.jentity2bow(je);

		System.out.println("***************************************************Start score entity:" + e.getName());
		float score = doc_bow.global_logtf_enhace_idf(ebow, this.D, this.gloablaIdf);
		System.out.println("Finish tf itf score entity:" + e.getName() + " score:" + score);
		JSONObject jobj = je.getJobj();
		String[] names = this.j2b.getNameString(jobj).split(" ");
		for (String s : names) {
			// Skip tokenizer punctuation artifacts.
			if (isPunctuationToken(s)) {
				continue;
			}
			// Only reward title words that are not stopwords.
			if (this.j2b.stopbow.getFreq(s) == 0) {
				if (doc_bow.getFreq(s) > 0) {
					score = score + 50;
					System.out.println("entity:" + e.getName() + " add 50 score for its title word:" + s + " in doc");
				}
			}
		}
		System.out.println("**********************FINISH OVERALL SCORE FOR Entity:" + e.getName() + " score:" + score);

		e.setScore(score);
	}

	/** Returns true for punctuation tokens produced by the tokenizer. */
	private static boolean isPunctuationToken(String s) {
		return s.equals("''") || s.equals("-") || s.equals(":") || s.equals("``")
				|| s.equals("--") || s.equals("@") || s.equals("'") || s.equals("`")
				|| s.equals(";");
	}

	/**
	 * Entry point. With two non-empty arguments (query path, result path) runs
	 * over that query set; otherwise falls back to the default dev set.
	 */
	public static void main(String[] args) {
		try {
			GloabalTfIdfLinking til;
			// Guard args.length first: indexing args[0]/args[1] unconditionally
			// would throw ArrayIndexOutOfBoundsException when arguments are missing.
			if (args.length >= 2
					&& args[0] != null && !args[0].isEmpty()
					&& args[1] != null && !args[1].isEmpty()) {
				til = new GloabalTfIdfLinking(args[0], args[1]);
			} else {
				til = new GloabalTfIdfLinking();
			}
			til.run();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
