/*
package backup;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import modules.*;
import functions.*;


public class DocumentFunction {

    // Utility methods for TF-IDF style term weighting and cosine similarity
    // between documents represented as term -> weight maps (DocumentTerms.GetTF()).
    //
    // NOTE(review): output paths below are hard-coded Windows locations
    // ("D://Development//...") kept from the original; consider parameterizing.

    // Euclidean (L2) norm of a term-weight vector.
    private static double ComputeNorm(Map<String, Double> termFrequencies) {
        double sumOfSquares = 0.0;
        for (double weight : termFrequencies.values()) {
            sumOfSquares += weight * weight;
        }
        return Math.sqrt(sumOfSquares);
    }

    // Cosine similarity of two term-weight vectors.
    // Returns 0.0 when either vector has zero norm; the original divided 0 by 0
    // there and returned NaN -- fixed.
    public static double ComputeCosineSimilarity(Map<String, Double> termFrequencies1, Map<String, Double> termFrequencies2) {
        double norm1 = ComputeNorm(termFrequencies1);
        double norm2 = ComputeNorm(termFrequencies2);
        if (norm1 == 0.0 || norm2 == 0.0) {
            return 0.0;
        }
        double dotProduct = 0.0;
        for (Map.Entry<String, Double> entry : termFrequencies1.entrySet()) {
            Double other = termFrequencies2.get(entry.getKey());
            if (other != null) {
                dotProduct += entry.getValue() * other;
            }
        }
        return dotProduct / (norm1 * norm2);
    }

    // Document frequency: for each term, the number of documents whose TF map
    // contains it (each document contributes at most 1 per term).
    private static Map<String, Double> countDocumentFrequencies(List<DocumentTerms> documents) {
        Map<String, Double> counts = new HashMap<String, Double>();
        for (DocumentTerms document : documents) {
            for (String term : document.GetTF().keySet()) {
                Double previous = counts.get(term);
                counts.put(term, previous == null ? 1.0 : previous + 1.0);
            }
        }
        return counts;
    }

    // Collection frequency: for each term, the sum of its TF values over all documents.
    private static Map<String, Double> sumTermFrequencies(List<DocumentTerms> documents) {
        Map<String, Double> sums = new HashMap<String, Double>();
        for (DocumentTerms document : documents) {
            for (Map.Entry<String, Double> term : document.GetTF().entrySet()) {
                Double previous = sums.get(term.getKey());
                sums.put(term.getKey(), previous == null ? term.getValue() : previous + term.getValue());
            }
        }
        return sums;
    }

    // Dumps `values` to `path`, one "term<TAB>value" line per entry, ordered by
    // DoubleValueComparator. The writer is closed even when writing throws
    // (the original leaked its FileWriter/BufferedWriter on any exception -- fixed).
    private static void writeSortedByValue(Map<String, Double> values, String path) throws Exception {
        TreeMap<String, Double> sorted = new TreeMap<String, Double>(new DoubleValueComparator(values));
        sorted.putAll(values);
        BufferedWriter bw = new BufferedWriter(new FileWriter(path));
        try {
            for (Map.Entry<String, Double> entry : sorted.entrySet()) {
                bw.write(entry.getKey() + "\t" + entry.getValue());
                bw.newLine();
            }
        } finally {
            bw.close();
        }
    }

    // IDF with natural log: ln(N / df(term)) where N is the document count.
    // Side effect: writes the value-sorted table to IDF.txt.
    public static Map<String, Double> GetIDF(List<DocumentTerms> documents) throws Exception {
        int totalDocuments = documents.size();
        Map<String, Double> documentFrequencies = countDocumentFrequencies(documents);
        Map<String, Double> idf = new HashMap<String, Double>();
        for (Map.Entry<String, Double> termCount : documentFrequencies.entrySet()) {
            idf.put(termCount.getKey(), Math.log(totalDocuments / termCount.getValue()));
        }
        writeSortedByValue(idf, "D://Development//IDF.txt");
        return idf;
    }

    // Logarithm of x in an arbitrary base, via the change-of-base identity.
    public static double logB(double x, double base) {
        return Math.log(x) / Math.log(base);
    }

    // IDF adjusted by a frequency-weight penalty:
    //   idf(term) - 1.1 * |log2(df(term) / 30)|
    // The magic constants 1.1 and 30 are kept from the original; their
    // provenance is undocumented -- TODO confirm with the experiment notes.
    // NOTE(review): throws NullPointerException when `idf` lacks a counted term
    // (null unboxing), exactly as the original did.
    // Side effect: writes the value-sorted table to IDFFW.txt.
    public static Map<String, Double> GetIDFFW(List<DocumentTerms> documents, Map<String, Double> idf) throws Exception {
        Map<String, Double> documentFrequencies = countDocumentFrequencies(documents);
        System.out.println(documents.size()); // preserved diagnostic: total document count
        Map<String, Double> idffw = new HashMap<String, Double>();
        for (Map.Entry<String, Double> termCount : documentFrequencies.entrySet()) {
            double penalty = 1.1 * Math.abs(logB(termCount.getValue() / 30, 2));
            idffw.put(termCount.getKey(), idf.get(termCount.getKey()) - penalty);
        }
        writeSortedByValue(idffw, "D://Development//IDFFW.txt");
        return idffw;
    }

    // IDF variant over summed term frequencies, base-10: log10(N / cf(term)).
    // Side effect: writes the value-sorted table to IDF2.txt.
    public static Map<String, Double> GetIDF2(List<DocumentTerms> documents) throws Exception {
        int totalDocuments = documents.size();
        Map<String, Double> summedFrequencies = sumTermFrequencies(documents);
        Map<String, Double> idf = new HashMap<String, Double>();
        for (Map.Entry<String, Double> termCount : summedFrequencies.entrySet()) {
            idf.put(termCount.getKey(), Math.log10(totalDocuments / termCount.getValue()));
        }
        writeSortedByValue(idf, "D://Development//IDF2.txt");
        return idf;
    }

    // The original GetIDF3 was a byte-for-byte duplicate of GetIDF2 -- it even
    // wrote to IDF2.txt, which looks like a copy-paste slip. Delegating keeps
    // behavior identical while removing the duplication; if a separate
    // IDF3.txt output was intended, confirm and change it here in one place.
    public static Map<String, Double> GetIDF3(List<DocumentTerms> documents) throws Exception {
        return GetIDF2(documents);
    }

    // Plain TF * IDF. Terms with no IDF entry are dropped from the result.
    public static Map<String, Double> GetTFIDF(Map<String, Double> TFs, Map<String, Double> IDFs) {
        Map<String, Double> tfidf = new HashMap<String, Double>();
        for (Map.Entry<String, Double> term : TFs.entrySet()) {
            Double idf = IDFs.get(term.getKey());
            if (idf != null) {
                tfidf.put(term.getKey(), term.getValue() * idf);
            }
        }
        return tfidf;
    }

    // Log-damped TF * IDF: ln(tf + 1) * idf. Terms with no IDF entry are dropped.
    public static Map<String, Double> GetNTFIDF(Map<String, Double> TFs, Map<String, Double> IDFs) {
        Map<String, Double> tfidf = new HashMap<String, Double>();
        for (Map.Entry<String, Double> term : TFs.entrySet()) {
            Double idf = IDFs.get(term.getKey());
            if (idf != null) {
                tfidf.put(term.getKey(), Math.log(term.getValue() + 1.0) * idf);
            }
        }
        return tfidf;
    }

    // TF * IDF scaled by a per-term confidence score. The raw C score is pulled
    // toward 1.0 by a factor of `scale` before multiplying. Terms with an IDF
    // but no C score fall back to plain TF * IDF; terms with no IDF are dropped.
    public static Map<String, Double> GetTFIDF_C(Map<String, Double> TFs, Map<String, Double> IDFs, Map<String, Double> Cs, double scale) {
        Map<String, Double> tfidfc = new HashMap<String, Double>();
        for (Map.Entry<String, Double> term : TFs.entrySet()) {
            Double idf = IDFs.get(term.getKey());
            if (idf == null) {
                continue;
            }
            Double c = Cs.get(term.getKey());
            if (c != null) {
                // Dampen the confidence score toward 1.0 by `scale`.
                double cscore = (c > 1.0)
                        ? ((c - 1.0) / scale) + 1.0
                        : 1.0 - (1.0 - c) / scale;
                tfidfc.put(term.getKey(), term.getValue() * idf * cscore);
            } else {
                tfidfc.put(term.getKey(), term.getValue() * idf);
            }
        }
        return tfidfc;
    }

    // Weighted sum of per-document term frequencies. Returns null when the
    // arrays differ in length (kept from the original; callers check for null).
    public static Map<String, Double> GetCombinedDocumentsFrequency(DocumentTerms[] documents, double[] weights) {
        if (documents.length != weights.length) {
            return null;
        }
        Map<String, Double> frequencies = new HashMap<String, Double>();
        for (int i = 0; i < documents.length; i++) {
            for (Map.Entry<String, Double> term : documents[i].GetTF().entrySet()) {
                Double previous = frequencies.get(term.getKey());
                double weighted = term.getValue() * weights[i];
                frequencies.put(term.getKey(), previous == null ? weighted : previous + weighted);
            }
        }
        return frequencies;
    }

}

*/
