package utils;

import java.io.*;
import java.util.*;

public class ReadFile {
	 /**
	  * Shared list of absolute file paths produced by the most recent
	  * {@link #readDirs(String)} call. idf() and tf_idf() read it, so they
	  * assume readDirs() (directly or via tfAllFiles) ran first for the
	  * same corpus.
	  */
	 private static ArrayList<String> fileList = new ArrayList<String>();

	 /**
	  * Recursively collects the absolute paths of every regular file under
	  * {@code filepath}.
	  *
	  * @param filepath root directory to scan
	  * @return the shared path list (cleared and refilled on each call)
	  */
	 public static List<String> readDirs(String filepath){
		 // Clear only at the entry point: the old code cleared inside the
		 // recursion as well, wiping paths already gathered from siblings.
		 fileList.clear();
		 try
	        {
	            File root = new File(filepath);
	            if(!root.isDirectory())
	            {
	                System.out.println("输入的参数应该为[文件夹名]");
	                System.out.println("filepath:" + root.getAbsolutePath());
	            }
	            else
	            {
	                collectFiles(root);
	            }
	        }catch(Exception e)
	        {
	            System.out.println(e.getMessage());
	        }
	        return fileList;
	 }

	 /** Depth-first walk: add plain files, descend into subdirectories. */
	 private static void collectFiles(File dir){
		 // listFiles() is portable; the old code concatenated a hard-coded
		 // "\\" separator and only worked on Windows.
		 File[] entries = dir.listFiles();
		 if(entries == null){
			 return; // I/O error, or not a directory
		 }
		 for(File entry : entries){
			 if(entry.isDirectory()){
				 collectFiles(entry);
			 }else{
				 fileList.add(entry.getAbsolutePath());
			 }
		 }
	 }

	 /**
	  * Reads a whole file as UTF-8 and returns its content with line
	  * separators removed (lines are concatenated back to back, matching the
	  * original behaviour).
	  *
	  * @param path file to read
	  * @return the concatenated lines of the file
	  * @throws FileNotFoundException if the file does not exist
	  * @throws IOException on any other read error
	  */
	 public static String readFile(String path) throws FileNotFoundException, IOException{
		 StringBuilder content = new StringBuilder(); // single-threaded use: StringBuilder over StringBuffer
		 // Close in finally so the stream is not leaked when readLine() throws.
		 BufferedReader br = new BufferedReader(
				 new InputStreamReader(new FileInputStream(path), "UTF-8"));
		 try{
			 String line;
			 while((line = br.readLine()) != null){
				 content.append(line);
			 }
		 }finally{
			 br.close();
		 }
		 return content.toString();
	 }

	 /**
	  * Splits a text on single spaces.
	  *
	  * @param str text to split
	  * @return the tokens, or {@code null} when the input is null or empty
	  */
	 public static String[] txtSplit(String str){
		if(str == null || str.length() == 0){
			return null;
		}
		return str.split(" ");
	 }

	//term frequency in a file, times for each word
	/**
	 * Raw term frequency: how many times each word occurs.
	 *
	 * @param cutwords tokenised words of one document
	 * @return map word -&gt; occurrence count
	 */
	public static HashMap<String, Integer> normalTF(String[] cutwords){
        HashMap<String, Integer> resTF = new HashMap<String, Integer>();
        for(String word : cutwords){
            Integer seen = resTF.get(word);
            resTF.put(word, seen == null ? 1 : seen + 1);
        }
        return resTF;
    }

    //term frequency in a file, frequency for each word
    /**
     * Relative term frequency: occurrences of each word divided by the total
     * word count of the document.
     *
     * @param cutwords tokenised words of one document
     * @return map word -&gt; frequency in [0,1]
     */
    public static HashMap<String, Float> tf(String[] cutwords){
        HashMap<String, Float> resTF = new HashMap<String, Float>();
        int wordLen = cutwords.length;
        // Typed entry iteration replaces the old raw Iterator plus
        // Float.parseFloat(value.toString()) string round-trip.
        for(Map.Entry<String, Integer> entry : normalTF(cutwords).entrySet()){
            resTF.put(entry.getKey(), (float) entry.getValue() / wordLen);
        }
        return resTF;
    } 

    /**
     * Raw term frequencies for every file under {@code dirc}:
     * file path -&gt; (word -&gt; count).
     *
     * @param dirc corpus directory
     * @throws IOException if a file cannot be read
     */
    public static HashMap<String, HashMap<String, Integer>> normalTFAllFiles(String dirc) throws IOException{
        HashMap<String, HashMap<String, Integer>> allNormalTF =
                new HashMap<String, HashMap<String, Integer>>();
        for(String file : readDirs(dirc)){
            String[] cutwords = Nlp.cutWords(file); //get cut words for one file
            allNormalTF.put(file, normalTF(cutwords));
        }    
        return allNormalTF;
    }

    /**
     * Relative term frequencies, with stop words removed, for every file
     * under {@code dirc}: file path -&gt; (word -&gt; frequency).
     *
     * @param dirc corpus directory
     * @throws IOException if a file cannot be read
     */
    public static HashMap<String,HashMap<String, Float>> tfAllFiles(String dirc) throws IOException{
        HashMap<String, HashMap<String, Float>> allTF =
                new HashMap<String, HashMap<String, Float>>();
        for(String file : readDirs(dirc)){
            String[] cutwords = Nlp.cutWords(file);              //get cut words for one file
            String[] cutstopwords = Nlp.dropStopWords(cutwords); //drop stop words
            allTF.put(file, tf(cutstopwords));
        }
        return allTF;
    }

    /**
     * Inverse document frequency for every word in the corpus:
     * idf(word) = log(docNum / documentsContaining(word)).
     * Relies on {@link #readDirs(String)} having populated the shared file
     * list (tfAllFiles does this).
     *
     * @param all_tf per-file term frequencies keyed by file path
     * @return map word -&gt; idf value
     */
    public static HashMap<String, Float> idf(HashMap<String,HashMap<String, Float>> all_tf){
        HashMap<String, Float> resIdf = new HashMap<String, Float>();
        HashMap<String, Integer> docFreq = new HashMap<String, Integer>();
        int docNum = fileList.size();

        // Count, for each word, the number of documents it occurs in.
        for(int i = 0; i < docNum; i++){
            HashMap<String, Float> termFreqs = all_tf.get(fileList.get(i));
            for(String word : termFreqs.keySet()){
                Integer seen = docFreq.get(word);
                docFreq.put(word, seen == null ? 1 : seen + 1);
            }
        }

        for(Map.Entry<String, Integer> entry : docFreq.entrySet()){
            // int / float promotes to float division, as in the original.
            float value = (float) Math.log(docNum / (float) entry.getValue());
            resIdf.put(entry.getKey(), value);
        }
        return resIdf;
    }

    /**
     * tf-idf weight per word per file, each file's map ordered by
     * descending weight.
     *
     * @param all_tf relative term frequencies from {@link #tfAllFiles(String)}
     * @param idfs   idf values from {@link #idf(HashMap)}
     * @return file path -&gt; (word -&gt; tf*idf), sorted by value
     */
    public static HashMap<String, TreeMap<String, Float>> tf_idf(HashMap<String,HashMap<String, Float>> all_tf,HashMap<String, Float> idfs){
        HashMap<String, TreeMap<String, Float>> resTfIdf =
                new HashMap<String, TreeMap<String, Float>>();
        for(String filepath : fileList){
            HashMap<String, Float> tfidf = new HashMap<String, Float>();
            for(Map.Entry<String, Float> entry : all_tf.get(filepath).entrySet()){
                String word = entry.getKey();
                tfidf.put(word, entry.getValue() * idfs.get(word));
            }
            resTfIdf.put(filepath, sortMapByValue(tfidf));
        }
        return resTfIdf;
    }

    /**
     * Returns a TreeMap over {@code oriMap} ordered by descending value.
     * NOTE(review): the comparator never returns 0, so get()/containsKey()
     * on the result are unreliable — use it for iteration order only.
     *
     * @param oriMap map to sort
     * @return the sorted map, or {@code null} for a null/empty input
     */
    public static TreeMap<String, Float> sortMapByValue(HashMap<String,Float> oriMap){
    	if (oriMap == null || oriMap.isEmpty()) {  
            return null;  
        } 
    	TreeMap<String, Float> sortedMap =
    			new TreeMap<String, Float>(new MapValueComparator(oriMap));
    	sortedMap.putAll(oriMap);
    	return sortedMap;
    }

    /**
     * Keeps, for each file, the raw counts of its top-{@code number} words
     * ranked by tf-idf weight (feature selection).
     *
     * @param tfidf       sorted tf-idf maps from {@link #tf_idf(HashMap, HashMap)}
     * @param allNormalTF raw counts from {@link #normalTFAllFiles(String)}
     * @param number      how many feature words to keep per file
     * @return file path -&gt; (selected word -&gt; raw count)
     */
    public static HashMap<String,HashMap<String,Integer>> selTfidf(HashMap<String, TreeMap<String, Float>> tfidf,HashMap<String, HashMap<String, Integer>> allNormalTF,int number){
        HashMap<String,HashMap<String,Integer>> selected =
                new HashMap<String,HashMap<String,Integer>>();
        for(String file : tfidf.keySet()){
        	HashMap<String, Integer> counts = allNormalTF.get(file);
        	HashMap<String, Integer> newdict = new HashMap<String, Integer>();
        	int taken = 0;
        	// The TreeMap iterates in descending-weight order, so the first
        	// `number` keys are the strongest features.
        	for(String word : tfidf.get(file).keySet()){
        		if(taken >= number){ // old code checked after the put and kept number+1 words
        			break;
        		}
        		newdict.put(word, counts.get(word));
        		taken++;
        	}
        	selected.put(file, newdict);
        }
        return selected;
    }
}


/**
 * Orders keys by the (descending) value they map to in a backing map.
 * Intended for building a TreeMap whose iteration order is "largest value
 * first".
 *
 * Note: this comparator imposes orderings that are inconsistent with equals —
 * it deliberately never returns 0, so a TreeMap built with it keeps keys
 * whose values are equal instead of merging them.
 */
class MapValueComparator implements Comparator<String> {

    /** Backing map that supplies the sort values. */
    Map<String, Float> base;

    public MapValueComparator(Map<String, Float> base) {
        this.base = base;
    }

    @Override
    public int compare(String a, String b) {
        // Descending by mapped value; ties ordered arbitrarily (never 0).
        return base.get(a) >= base.get(b) ? -1 : 1;
    }
}
