package org.nlp.algo.feature.ig;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileFilter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.commons.io.FileUtils;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.Version;
import org.nlp.lucene.BloomAnalyzer;

/**
 * Information-gain (IG) based feature selection over a directory tree of
 * category-labelled text documents ("sougo/&lt;category&gt;/*.txt").
 *
 * <p>For every distinct term t it computes
 * IG(t) = -Σ P(ci)·log2 P(ci) + P(t)·Σ P(ci|t)·log2 P(ci|t)
 *         + P(~t)·Σ P(ci|~t)·log2 P(ci|~t)
 * and writes the terms sorted by descending IG to resource/feature/iG.txt.
 *
 * @author huili
 */
public class FeatureIG {

	/** All distinct terms seen across every document. */
	public static Set<String> wordsSet = new HashSet<String>();
	/** All category (class) names, one per training sub-directory. */
	public static Set<String> classSet = new HashSet<String>();

	/** Output directory for feature files. */
	private static String featurePath = "resource/feature/";

	/** category name -> (document name -> set of distinct terms in that document). */
	public static HashMap<String, HashMap<String, Set<String>>> ptMap =
			new HashMap<String, HashMap<String, Set<String>>>();

	/** Total number of documents processed across all categories. */
	public static int docNum = 0;

	public static void main(String[] args) throws IOException {
		File root = new File("sougo");
		File[] trainTypes = root.listFiles();
		// listFiles() returns null when the path is missing or unreadable —
		// fail with a clear message instead of an NPE.
		if (trainTypes == null) {
			throw new IOException("Cannot list training directory: " + root.getAbsolutePath());
		}
		for (int i = 0; i < trainTypes.length; i++) {
			if (trainTypes[i].isDirectory()
					&& !trainTypes[i].getName().startsWith(".")) {
				System.out.println(trainTypes[i]);
				comOneType(trainTypes[i]);
			}
		}

		// Dump the raw term map for inspection/debugging.
		BufferedWriter b = new BufferedWriter(new FileWriter(new File(featurePath + "shl.txt")));
		try {
			b.write(ptMap.toString());
		} finally {
			b.close();
		}

		// Compute IG for every term.
		HashMap<String, Double> pair = new HashMap<String, Double>();
		Iterator<String> itw = wordsSet.iterator();
		while (itw.hasNext()) {
			String wordName = itw.next();
			pair.put(wordName, GetIG(wordName));
		}

		// Sort descending by IG and write one "term=score" entry per line.
		List<Map.Entry<String, Double>> info = SortPair2(pair);
		// FileWriter truncates an existing file and creates a missing one,
		// so no explicit delete()/createNewFile() is needed.
		BufferedWriter bw = new BufferedWriter(new FileWriter(new File(featurePath + "iG.txt")));
		try {
			for (int i = 0; i < info.size(); i++) {
				bw.write(info.get(i).toString() + "\r\n");
			}
		} finally {
			bw.close();
		}
		System.out.println("运行完成！");
	}

	/**
	 * Tokenizes every *.txt document in one category directory and records,
	 * per document, the set of distinct terms it contains. Updates the shared
	 * {@link #ptMap}, {@link #wordsSet}, {@link #classSet} and {@link #docNum}.
	 *
	 * @param file a category directory under the training root
	 * @throws IOException if a document cannot be read or tokenized
	 */
	private static void comOneType(File file) throws IOException {
		File[] models = file.listFiles(new FileFilter() {
			@Override
			public boolean accept(File pathname) {
				return pathname.getName().endsWith(".txt");
			}
		});
		if (models == null) {
			return; // not a directory or unreadable — nothing to do
		}

		BloomAnalyzer analyzer = new BloomAnalyzer(Version.LUCENE_40);
		try {
			HashMap<String, Set<String>> hsl = new HashMap<String, Set<String>>();
			for (int e = 0; e < models.length; e++) {
				// Source corpus is GBK-encoded (Sogou news dump).
				String str = FileUtils.readFileToString(models[e], "GBK");
				TokenStream token = analyzer.tokenStream("", new StringReader(str));
				CharTermAttribute term = token.addAttribute(CharTermAttribute.class);
				Set<String> wordList = new HashSet<String>();
				docNum++;
				// Lucene 4.x TokenStream contract: reset() before consuming,
				// end() after the last token, close() always — otherwise the
				// reused stream throws on the next document.
				token.reset();
				try {
					while (token.incrementToken()) {
						String word = term.toString();
						wordList.add(word);
						wordsSet.add(word);
					}
					token.end();
				} finally {
					token.close();
				}
				hsl.put(models[e].getName(), wordList);
			}
			ptMap.put(file.getName(), hsl);
			classSet.add(file.getName());
		} finally {
			analyzer.close();
		}
	}

	/**
	 * Computes the information gain of a single term over all categories.
	 *
	 * @param wordName the term t
	 * @return IG(t) = H(C) - P(t)·H(C|t) - P(~t)·H(C|~t), expanded below
	 */
	public static double GetIG(String wordName) {
		double pcitM = 0.0;  // Σ P(ci|t)·log2 P(ci|t)
		double pcit_M = 0.0; // Σ P(ci|~t)·log2 P(ci|~t)
		double pciM = 0.0;   // -Σ P(ci)·log2 P(ci)  (class entropy)

		// P(t): fraction of documents containing the term.
		int ptNum = GetPtNum(wordName);
		double ptV = (double) ptNum / ((double) docNum);
		double pt_V = 1.0 - ptV;

		Iterator<String> itT = classSet.iterator();
		while (itT.hasNext()) {
			String typeName = itT.next();
			double pciV = GetPci(typeName);                       // P(ci)
			double pcitV = GetPcit(ptNum, typeName, wordName);    // P(ci|t)
			double pcit_V = GetPci_t(ptNum, typeName, wordName);  // P(ci|~t)

			// Treat 0·log(0) as 0 by skipping the log for non-positive inputs.
			double logPci = (pciV > 0) ? Math.log10(pciV) / Math.log10(2) : 0.0;
			double logpcit = (pcitV > 0) ? Math.log10(pcitV) / Math.log10(2) : 0.0;
			double logpcit_V = (pcit_V > 0) ? Math.log10(pcit_V) / Math.log10(2) : 0.0;

			pciM += -pciV * logPci;
			pcit_M += pcit_V * logpcit_V;
			pcitM += pcitV * logpcit;
		}

		return ptV * pcitM + pt_V * pcit_M + pciM;
	}

	/**
	 * @return P(ci): fraction of all documents that belong to the category.
	 */
	private static double GetPci(String typeName) {
		int docCNum = ptMap.get(typeName).size();
		return ((double) docCNum) / ((double) docNum);
	}

	/**
	 * @return document frequency of the term, i.e. the number of documents
	 *         (across all categories) whose term set contains {@code wordName}
	 */
	private static int GetPtNum(String wordName) {
		int docTNum = 0;
		Iterator<Entry<String, HashMap<String, Set<String>>>> it = ptMap.entrySet().iterator();
		while (it.hasNext()) {
			Entry<String, HashMap<String, Set<String>>> enC = it.next();
			Iterator<Entry<String, Set<String>>> docWord = enC.getValue().entrySet().iterator();
			while (docWord.hasNext()) {
				Entry<String, Set<String>> enW = docWord.next();
				if (enW.getValue().contains(wordName)) {
					docTNum++;
				}
			}
		}
		return docTNum;
	}

	/**
	 * @param ptNum document frequency of the term (denominator)
	 * @return P(ci|t): of the documents containing the term, the fraction
	 *         that belong to the given category; 0 when the term occurs nowhere
	 */
	private static double GetPcit(int ptNum, String typeName, String wordName) {
		if (ptNum == 0) {
			return 0.0; // avoid 0/0 -> NaN corrupting the IG score
		}
		HashMap<String, Set<String>> docWord = ptMap.get(typeName);
		int num = 0;
		Iterator<Entry<String, Set<String>>> it = docWord.entrySet().iterator();
		while (it.hasNext()) {
			Entry<String, Set<String>> itE = it.next();
			if (itE.getValue().contains(wordName)) {
				num++;
			}
		}
		return (double) num / ((double) ptNum);
	}

	/**
	 * @param ptNum document frequency of the term
	 * @return P(ci|~t): of the documents NOT containing the term, the fraction
	 *         that belong to the given category; 0 when every document has the term
	 */
	private static double GetPci_t(int ptNum, String typeName, String wordName) {
		int absent = docNum - ptNum;
		if (absent == 0) {
			return 0.0; // avoid division by zero when the term is in every document
		}
		HashMap<String, Set<String>> docWord = ptMap.get(typeName);
		int num = 0;
		Iterator<Entry<String, Set<String>>> it = docWord.entrySet().iterator();
		while (it.hasNext()) {
			Entry<String, Set<String>> itE = it.next();
			if (!itE.getValue().contains(wordName)) {
				num++;
			}
		}
		return (double) num / ((double) absent);
	}

	/**
	 * Sorts the (term, IG) pairs by descending IG value.
	 *
	 * @param listPair term -> IG score
	 * @return the entries as a list, highest score first
	 */
	public static List<Map.Entry<String, Double>> SortPair2(HashMap<String, Double> listPair) {
		List<Map.Entry<String, Double>> infoIds =
				new ArrayList<Map.Entry<String, Double>>(listPair.entrySet());
		Collections.sort(infoIds, new Comparator<Map.Entry<String, Double>>() {
			@Override
			public int compare(Map.Entry<String, Double> o1, Map.Entry<String, Double> o2) {
				// Double.compare avoids boxed '>' comparisons and is NaN-safe.
				return Double.compare(o2.getValue(), o1.getValue());
			}
		});
		return infoIds;
	}

}
