package tools;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;

import org.json.JSONException;

import entities.Entity;

import naive2.BoW;
import naive2.Json2BoWStop;
import naive2.JsonEntity;
import naive2.LuceneJsonRetriever;


//Uses stop-word filtering when computing the idf counts over all json documents
//(each term is counted once per document, so tf within a document does not skew it)
public class TfIdfCalculator {
	//the path of the file the idf information is written to
	String dir;
	private LuceneJsonRetriever lj = null;
	private LuceneIndexReader lr = null;
	private Json2BoWStop j2b = null;

	/**
	 * @param dir path of the output file the idf counts will be written to
	 * @param lj  retriever used to load the indexed json entities
	 */
	public TfIdfCalculator(String dir, LuceneJsonRetriever lj){
		this.dir = dir;
		this.lj = lj;
		this.lr = lj.getReader();
		// NOTE(review): Json2BoWStop is constructed with null — presumably it
		// falls back to a default stop-word list; confirm against its ctor.
		this.j2b = new Json2BoWStop(null);
	}

	/**
	 * Loads a previously written idf-count file (one "word\tfreq" pair per
	 * line, as produced by {@link #run()}) back into a {@code BoW}.
	 *
	 * @param dir path of the idf-count file
	 * @return the reconstructed BoW, or {@code null} if the file could not be
	 *         read or parsed
	 */
	public static BoW idfCount(String dir){
		try {
			String content = DocRetriever.getFileContents(dir);
			BoW idfCount = new BoW();
			for (String line : content.split("\n")) {
				// Skip blank lines (e.g. a trailing newline at end of file);
				// the original indexed pieces[1] unconditionally and crashed here.
				if (line.trim().isEmpty()) {
					continue;
				}
				String[] pieces = line.split("\t");
				String word = pieces[0];
				// trim() tolerates CRLF line endings, where split("\n") would
				// otherwise leave a '\r' that breaks Integer.parseInt
				int freq = Integer.parseInt(pieces[1].trim());
				for (int i = 0; i < freq; i++) {
					idfCount.putWord(word);
				}
			}
			System.out.println("loading idf finished");
			return idfCount;
		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}

	/**
	 * Computes a document-frequency count for every term across all indexed
	 * json entities (each term counted at most once per entity) and writes the
	 * result to {@link #dir} as one "word\tfreq" line per term.
	 *
	 * @throws JSONException if an entity's json cannot be parsed
	 */
	public void run() throws JSONException{

		// Issue one throwaway query so that lr lazily initializes name2id.
		this.lr.queryEntityJSONIndex("Michael Jordan");//make lr initate name2id
		Map<String,Integer> name2id = this.lr.name2id;

		int size = name2id.size();
		int count = 0;
		BoW idf_count = new BoW();

		//calculate idf count: one putWord per distinct term per entity
		for( String key: name2id.keySet() ){
			System.out.println("calcuate"+key);

			JsonEntity je = JsonEntity.instance(new Entity(key), lj);
			BoW jbow = this.j2b.jentity2bow(je);

			for( String term: jbow.getbag().keySet()){
				idf_count.putWord(term);
			}

			count++;
			System.out.println("current:"+count+"/"+size);
		}

		//write to file; try-with-resources guarantees the writer is closed
		//even when a write fails (the original only closed on the success path)
		File idfFile = new File(this.dir);
		try (BufferedWriter output = new BufferedWriter(new FileWriter(idfFile))) {
			for( String key: idf_count.getbag().keySet()){
				output.write(key + "\t" + idf_count.getFreq(key) + "\n");
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public static void main(String[] args) throws IOException, JSONException{

		/*LuceneJsonRetriever lj = new LuceneJsonRetriever("/largedata1/cmput696/students/lyao1/lemmaIndex");
		TfIdfCalculator tl = new TfIdfCalculator("/largedata1/cmput696/students/lyao1/idfcount.txt",lj);
		tl.run();*/

		BoW idf_count = TfIdfCalculator.idfCount("/largedata1/cmput696/students/lyao1/idfcount.txt");
		// idfCount returns null on any read/parse failure; the original
		// dereferenced it unconditionally and would NPE here.
		if (idf_count == null) {
			System.err.println("failed to load idf counts");
			return;
		}
		System.out.println("loading idf finished");
		System.out.println("total words length"+idf_count.getTotalWords());
		System.out.println("total diffrent words" + idf_count.getbag().keySet().size());
		System.out.println("georgia:"+idf_count.getFreq("georgia"));
		System.out.println("lori:"+idf_count.getFreq("lori"));
		System.out.println("thomasville"+idf_count.getFreq("thomasville"));

	}


}
