package hbase.classification;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.Set;
import java.util.StringTokenizer;


import objects.base.Bracketing;

import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;



import com.mysql.jdbc.log.LogUtils;

import stemming.tools.Tools;
import utils.mathCalc.Calculations;
import utils.primitif.ArrayListUtils;
import utils.primitif.HashUtils;
import utils.primitif.StringUtils;


/*** @author Gary Benattar
 * TABLE LIST
 * hbase(main):003:0> list
 * 
		countDocDate
		dateToTopic
		docDate
		docWeight
		frequency
		frequencyAllEvent*
		opinions*
		topic*
		trends*
		trendsAllEvent*
		wordCount*
		wordFreq
		zscore*
		zscoreAllEvent*


 *		Actions TO DO
		hbase(main):006:0> truncate 'trendsAllEvent'
		hbase(main):007:0> truncate 'zscoreAllEvent'
		hbase(main):009:0> truncate 'opinions'
		hbase(main):010:0> truncate 'topic'
		hbase(main):011:0> truncate 'wordCount'
		hbase(main):011:0> truncate 'frequencyAllEvent'.


		hbase(main):002:0> truncate 'topic'
		Truncating 'topic' table (it may take a while):
		 - Disabling table...
		 - Dropping table...
		 - Creating table...
		0 row(s) in 7.8020 seconds

		hbase(main):003:0> truncate 'opinions'
		Truncating 'opinions' table (it may take a while):
		 - Disabling table...
		 - Dropping table...
		 - Creating table...
		0 row(s) in 7.2270 seconds

		hbase(main):004:0> truncate 'wordCount'
		Truncating 'wordCount' table (it may take a while):
		 - Disabling table...
		 - Dropping table...
		 - Creating table...
		0 row(s) in 8.4420 seconds

		hbase(main):005:0> truncate 'frequencyAllEvent'
		Truncating 'frequencyAllEvent' table (it may take a while):
		 - Disabling table...
		 - Dropping table...
		 - Creating table...
		0 row(s) in 7.2220 seconds

		hbase(main):006:0> truncate 'stem'
		Truncating 'stem' table (it may take a while):
		 - Disabling table...
		 - Dropping table...
		 - Creating table...
		0 row(s) in 7.6590 seconds

		hbase(main):007:0> truncate 'dateToTopic'
		Truncating 'dateToTopic' table (it may take a while):
		 - Disabling table...
		 - Dropping table...
		 - Creating table...
		0 row(s) in 6.8800 seconds

		hbase(main):008:0> truncate 'date'
		Truncating 'date' table (it may take a while):
		 - Disabling table...
		 - Dropping table...
		 - Creating table...
		0 row(s) in 7.6530 seconds

		hbase(main):009:0>

 *
 */
public class classification {
	private static Configuration config = HBaseConfiguration.create();

	/**
	 * Count the rows of the 'date' table, i.e. the number of days covered by the
	 * overall observation period.
	 * @return the number of day rows found
	 * @throws IOException on HBase access failure
	 */
	public static int getNumberOfDayForTheOverallPeriod() throws IOException, ClassNotFoundException, SQLException{
		int cpt = 0;
		HTable tableDate = new HTable(config, "date");
		ResultScanner scanner = tableDate.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				cpt++;
			}
		} finally {
			// BUG FIX: the scanner was never closed; it holds server-side resources
			scanner.close();
			tableDate.close();
		}
		return cpt;
	}

	/**
	 * Count how many topics exist at the given date.
	 * @param tableTopic handle on the table keyed by date
	 * @param date the row key to look up
	 * @return the number of cells stored under that date row, or 0 when the row is absent
	 * @throws IOException on HBase access failure
	 */
	public static int getNumberOfTopicPerDate(HTable tableTopic, String date) throws IOException, ClassNotFoundException, SQLException{
		Get lookup = new Get(Bytes.toBytes(date));
		Result row = tableTopic.get(lookup);
		return row.isEmpty() ? 0 : row.list().size();
	}

	/**
	 * Count how many opinions exist at the given date.
	 * @param tableOpinions handle on the table keyed by date
	 * @param date the row key to look up
	 * @return the number of cells stored under that date row, or 0 when the row is absent
	 * @throws IOException on HBase access failure
	 */
	public static int getNumberOfOpinionPerDate(HTable tableOpinions, String date) throws IOException, ClassNotFoundException, SQLException{
		Get lookup = new Get(Bytes.toBytes(date));
		Result row = tableOpinions.get(lookup);
		return row.isEmpty() ? 0 : row.list().size();
	}

	/**
	 * Count how many polls exist at the given date.
	 * @param tablePolls handle on the table keyed by date
	 * @param date the row key to look up
	 * @return the number of cells stored under that date row, or 0 when the row is absent
	 * @throws IOException on HBase access failure
	 */
	public static int getNumberOfPollsPerDate(HTable tablePolls, String date) throws IOException, ClassNotFoundException, SQLException{
		Get lookup = new Get(Bytes.toBytes(date));
		Result row = tablePolls.get(lookup);
		return row.isEmpty() ? 0 : row.list().size();
	}

	/**
	 * Tokenize a topic title, stem each word, and increment the per-date counter of
	 * every stem in 'wordCount'. The unstemmed (display) form of each stem is kept
	 * in the 'stem' table so trends can later be printed with real words.
	 * @param tableStem table mapping stem -> display form
	 * @param wordCount table of per-stem, per-date counters
	 * @param sTitle the topic title to analyse (ignored when null or blank)
	 * @param date the date qualifier under which occurrences are counted
	 * @throws IOException on HBase access failure
	 */
	public static void storeWordOccurenceInWordCountTable(HTable tableStem, HTable wordCount, String sTitle, String date) throws IOException, ClassNotFoundException, SQLException{
		// generalizes the original ""/" " screen: any all-whitespace title is equally useless
		if(sTitle == null || sTitle.trim().isEmpty())
			return;
		byte[] dateFamily = Bytes.toBytes("date");
		byte[] stemFamily = Bytes.toBytes("stem");
		byte[] dateQualifier = Bytes.toBytes(date);

		for(String pendingToken : sTitle.split(" ")){
			String cleanString = utils.primitif.StringUtils.removeBadChar(pendingToken.toLowerCase());
			// BUG FIX: consecutive spaces / punctuation-only tokens clean down to ""
			// and used to be stemmed and counted as a word
			if(cleanString.isEmpty())
				continue;
			String token = Tools.stem(cleanString);
			// remember the pretty (unstemmed) form for later display
			tableStem.put(new Put(Bytes.toBytes(token)).add(stemFamily, Bytes.toBytes(token), Bytes.toBytes(cleanString)));
			// atomic server-side increment of the per-date counter for this stem
			wordCount.incrementColumnValue(Bytes.toBytes(token), dateFamily, dateQualifier, 1);
		}
	}

	/**
	 * Count all the collocations of a topic title and store them:
	 * increments the per-date counter of each collocation in 'wordCount', and
	 * records the word distance of the pair in the collocations table.
	 * @param wordCount table of per-collocation, per-date counters
	 * @param collocationsTable table keyed by collocation, storing the word distance
	 * @param sTitle the topic title to analyse (ignored when null or blank)
	 * @param date the date qualifier under which occurrences are counted
	 * @throws IOException on HBase access failure
	 */
	public static void storeCollocationOccurenceForTopic(HTable wordCount, HTable collocationsTable, String sTitle, String date) throws IOException, ClassNotFoundException, SQLException{
		// generalizes the original ""/" " screen: any all-whitespace title is skipped
		if(sTitle == null || sTitle.trim().isEmpty())
			return;
		byte[] dateFamily = Bytes.toBytes("date");
		byte[] dateQualifier = Bytes.toBytes(date);
		// iterate entries instead of keySet(): avoids a second hash lookup per pair
		for(Entry<String,Integer> e : getCollocation(sTitle, 5).entrySet()){
			wordCount.incrementColumnValue(Bytes.toBytes(e.getKey()), dateFamily, dateQualifier, 1);
			collocationsTable.put(new Put(Bytes.toBytes(e.getKey())).add(dateFamily, dateQualifier, Bytes.toBytes(e.getValue())));
		}
	}

	/**
	 * Collect every collocation of the string: an ordered pair of non stop-words that
	 * co-occur within {@code window} positions of each other. The key is
	 * "stem(w1) stem(w2)" and the value is the distance (j - i) between the words;
	 * a pair seen twice keeps its last distance.
	 * @param s the sentence to analyse (split on single spaces)
	 * @param window maximum forward distance between the two words of a pair
	 * @return hashtable of collocation -> distance from the parent word
	 */
	public static Hashtable<String,Integer> getCollocation(String s, int window){
		Hashtable<String,Integer> collocations = new Hashtable<String,Integer>();
		// cache raw token -> lowercased stem so each word is stemmed at most once
		Hashtable<String,String> stemString = new Hashtable<String, String>();
		String[] sArray = s.split(" ");
		for(int i = 0; i < sArray.length; i++){
			int currentWindow = window;
			if((sArray.length - i) < window){
				currentWindow = sArray.length - i;
				// fewer than two words left: no pair can be formed from here on
				if(currentWindow < 2){
					break;
				}
			}
			if(Tools.isStopWord(sArray[i]))
				continue;
			if(!stemString.containsKey(sArray[i])){
				stemString.put(sArray[i], Tools.stem(utils.primitif.StringUtils.removeBadChar(sArray[i])).toLowerCase());
			}
			// BUG FIX: the partner bound must be relative to i (j < i + currentWindow);
			// the old absolute bound (j < currentWindow) stopped pairing entirely once
			// i >= window - 1, so collocations past the first few words were never emitted.
			for(int j = i + 1; j < i + currentWindow; j++){
				if(Tools.isStopWord(sArray[j]))
					continue;
				if(!stemString.containsKey(sArray[j])){
					stemString.put(sArray[j], Tools.stem(utils.primitif.StringUtils.removeBadChar(sArray[j])).toLowerCase());
				}
				collocations.put(stemString.get(sArray[i]).concat(" " + stemString.get(sArray[j])), j - i);
			}
		}
		return collocations;
	}

	/**
	 * Analyse an opinion for a given date: fetch the parent topic's title and
	 * re-count its words at the opinion's date (as if the topic were re-published).
	 * @param tableStem table mapping stem -> display form
	 * @param wordCount table of per-stem, per-date counters
	 * @param tableTopic table keyed by topic id, value = title
	 * @param sTopicId the parent topic id (ignored when null/blank)
	 * @param date the date qualifier of the opinion
	 * @throws IOException on HBase access failure
	 */
	public static void storeWordOccurenceForOpinion(HTable tableStem, HTable wordCount, HTable tableTopic, String sTopicId, String date) throws IOException, ClassNotFoundException, SQLException{
		if(sTopicId == null || sTopicId.equals("") || sTopicId.equals(" "))
			return;
		// one get() + isEmpty() instead of exists() followed by get(): halves the
		// round-trips to the region server for every opinion processed
		Result res = tableTopic.get(new Get(Bytes.toBytes(sTopicId)));
		if(!res.isEmpty()){
			List<KeyValue> value = res.list();
			storeWordOccurenceInWordCountTable(tableStem, wordCount, Bytes.toString(value.get(0).getValue()), date);
		}
	}

	/**
	 * Scan every topic and classify its title's words by incrementing the per-date
	 * counters in 'wordCount'. The date is taken as the last qualifier of the row's
	 * 'date' family.
	 * @throws IOException on HBase access failure
	 */
	public static void parseTopicAndClassifyWord() throws IOException, ClassNotFoundException, SQLException{
		HTable tableTopic = new HTable(config, "topic");
		HTable wordCount = new HTable(config, "wordCount");
		HTable tableStem = new HTable(config, "stem");
		HTable collocationsTable = new HTable(config, "collocations");
		byte[] dateFamily = Bytes.toBytes("date");
		ResultScanner scanner = tableTopic.getScanner(new Scan());
		int block = 0;
		System.out.println("[START] Analyse element in parseTopicAndClassifyWord");
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				NavigableMap<byte[], byte[]> mapNav = res.getFamilyMap(dateFamily);
				byte[] dateV = null;
				// BUG FIX: guard against rows with no 'date' family (used to NPE)
				if(mapNav != null){
					for(Entry<byte[], byte[]> columnFamilyEntry : mapNav.entrySet()){
						dateV = columnFamilyEntry.getKey(); // keep the last date qualifier
					}
				}
				block++;
				if(dateV != null){
					storeWordOccurenceInWordCountTable(tableStem, wordCount, Bytes.toString(res.value()), Bytes.toString(dateV));
				}
				if(block % 1000 == 0)
					System.out.println(block); // progress heartbeat
			}
		} finally {
			// BUG FIX: scanner was never closed; table closes now survive exceptions
			scanner.close();
			collocationsTable.close();
			tableTopic.close();
			wordCount.close();
			tableStem.close();
		}
		System.out.println("[END] Analyse element in parseTopicAndClassifyWord");
	}

	/**
	 * Scan every opinion and classify words through its parent topic: the topic's
	 * title is re-counted at the opinion's date, as if the topic were re-published.
	 * @throws IOException on HBase access failure
	 */
	public static void parseOpinionAndClassifyWord() throws IOException, ClassNotFoundException, SQLException{
		HTable tableOpinions = new HTable(config, "opinions");
		HTable wordCount = new HTable(config, "wordCount");
		HTable tableTopic = new HTable(config, "topic");
		HTable tableStem = new HTable(config, "stem");
		byte[] topicFamily = Bytes.toBytes("topicId");
		ResultScanner scanner = tableOpinions.getScanner(new Scan());
		int block = 0;
		System.out.println("[START] Analyse element in parseOpinionAndClassifyWord");
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				NavigableMap<byte[], byte[]> mapNav = res.getFamilyMap(topicFamily);
				// BUG FIX: guard against rows with no 'topicId' family (used to NPE)
				if(mapNav == null)
					continue;
				for(Entry<byte[], byte[]> columnFamilyEntry : mapNav.entrySet()){
					byte[] topicId = columnFamilyEntry.getKey();
					if(topicId != null)
						storeWordOccurenceForOpinion(tableStem, wordCount, tableTopic, Bytes.toString(topicId), Bytes.toString(res.getRow()));
					block++;
					System.out.println(Bytes.toString(topicId) + " | date = " + Bytes.toString(res.getRow()));
				}
			}
		} finally {
			// BUG FIX: scanner and tableStem were never closed (resource leak)
			scanner.close();
			tableTopic.close();
			tableOpinions.close();
			wordCount.close();
			tableStem.close();
		}
		System.out.println("[END] Analyse element in parseOpinionAndClassifyWord");
	}


	/**
	 * Parse polls and classify by incrementing values by date
	 * @throws IOException
	 * @throws ClassNotFoundException
	 * @throws SQLException
	 */
	public static void parsePollsAndClassifyWord() throws IOException, ClassNotFoundException, SQLException{
		HTable tablePolls = new HTable(config, "polls");
		HTable wordCount = new HTable(config, "wordCount");
		HTable tableStem = new HTable(config, "stem");
		ResultScanner scanner = tablePolls.getScanner(new Scan());
		int block = 0;
		System.out.println("[START] Analyse element in parsePollsAndClassifyWord");
		for (Result res = scanner.next(); res != null; res = scanner.next()) {
			byte[] dateV = res.getRow();
			if(dateV != null){
				for(KeyValue kv : res.list()){
					storeWordOccurenceInWordCountTable(tableStem,wordCount,Bytes.toString(kv.getValue()),Bytes.toString(kv.getRow()));
					/*System.out.println(Bytes.toString(kv.getRow()));
					System.out.println(Bytes.toString(kv.getQualifier()));
					System.out.println(Bytes.toString(kv.getValue()));
					System.out.println(Bytes.toString(dateV));*/
					block++;
				}

				//storeCollocationOccurenceForTopic(wordCount,collocationsTable, Bytes.toString(res.value()), Bytes.toString(dateV));

			}
			if(block % 1000 == 0)
				System.out.println(block);
		}
		System.out.println("[END] Analyse element in parsePollsAndClassifyWord");
		tablePolls.close();
		wordCount.close();
		tableStem.close();
	}


	/**
	 * Caculate frequencies for (w,date D) based on event count
	 * @throws IOException
	 * @throws ClassNotFoundException
	 * @throws SQLException
	 */
	public static void calculateFrequenciesBasedOnWordCountTable() throws IOException, ClassNotFoundException, SQLException{
		HTable tableOpinions = new HTable(config, "opinions");
		HTable tablePolls = new HTable(config, "polls");
		HTable tableDateToTopic = new HTable(config, "dateToTopic");
		HTable frequencyAllEvent = new HTable(config, "frequencyAllEvent");
		HTable wordCount = new HTable(config, "wordCount");
		byte[] dateFamily = Bytes.toBytes("date");
		ResultScanner scanner = wordCount.getScanner(new Scan());
		Hashtable<String, Integer> dividerPerDate = new Hashtable<String, Integer>();
		int block = 0;
		int divider;
		for (Result res = scanner.next(); res != null; res = scanner.next()) {
			List<KeyValue> value = res.list();
			for(KeyValue keyValue: value){
				String qualifier = Bytes.toString(keyValue.getQualifier());
				if(dividerPerDate.containsKey(qualifier)){
					divider = dividerPerDate.get(qualifier);
				}else{
					divider = getNumberOfTopicPerDate(tableDateToTopic,qualifier) + getNumberOfOpinionPerDate(tableOpinions, qualifier) 
					+ getNumberOfPollsPerDate(tablePolls, qualifier);
					dividerPerDate.put(qualifier, divider);
				}
				double frequency = Bytes.toLong(keyValue.getValue()) / (double)(divider);
				frequencyAllEvent.put(new Put(keyValue.getRow()).add(dateFamily, keyValue.getQualifier(), Bytes.toBytes(String.valueOf(frequency))));
				block++;
			}

			System.out.println("Analyse element " + block + " in calculateFrequenciesBasedOnWordCountTable");
		}
		frequencyAllEvent.close();
		tableDateToTopic.close();
		tablePolls.close();
		tableOpinions.close();
		wordCount.close();
	}

	/**
	 * Build a day-by-day frequency series for every single word of
	 * 'frequencyAllEvent' (padding missing days with zero), and once at least 100
	 * days of history are available, compute the z-score of each new observation
	 * against the running mean/deviation and store it in 'zscoreAllEvent'.
	 * Words that are blank, stop words, shorter than 3 chars, start with
	 * whitespace, or contain a space (collocations) are skipped.
	 * @throws IOException on HBase access failure
	 */
	public static void calculateZScoreBasedOnWordCountTable() throws IOException, ClassNotFoundException, SQLException{
		// BUG FIX: only the tables actually used are opened now; the old version
		// opened five additional tables (topic, polls, date, opinions, trendsAllEvent)
		// that served only dead commented-out code and were mostly never closed.
		HTable frequencyAllEvent = new HTable(config, "frequencyAllEvent");
		HTable zscoreAllEvent = new HTable(config, "zscoreAllEvent");
		Date dFirst = Calculations.oldDateForTheOverallPeriod();
		ResultScanner scanner = frequencyAllEvent.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				String row = Bytes.toString(res.getRow());
				// skip blanks, stop words, very short words and collocations
				if(row.equals("") || row.equals(" ") || Tools.isStopWord(row)
						|| utils.primitif.StringUtils.startWithWhiteSpace(row)
						|| row.length() <= 2 || row.contains(" "))
					continue;
				ArrayList<Double> arrFreq = new ArrayList<Double>();
				ArrayList<Double> arrZscore = new ArrayList<Double>();
				ArrayList<Date> arrDate = new ArrayList<Date>();
				arrDate.add(dFirst);
				Calendar cal = Calendar.getInstance();
				for(KeyValue keyValue : res.list()){
					Date currentDate = utils.primitif.StringUtils.getDateFromString(keyValue.getQualifier());
					Date old = arrDate.get(arrDate.size() - 1);
					double nbDaysWithoutValues = Calculations.daysDifferences(currentDate, old);
					// pad every unobserved day with zero frequency / zero z-score
					for(int i = 1; i < (int) nbDaysWithoutValues; i++){
						arrFreq.add((double) 0);
						cal.setTime(old);
						cal.add(Calendar.DATE, i);
						arrDate.add(cal.getTime());
						arrZscore.add((double) 0);
					}
					arrFreq.add(Double.parseDouble(Bytes.toString(keyValue.getValue())));
					arrDate.add(currentDate);
					// need at least 100 days of history before a z-score is meaningful
					if(Calculations.daysDifferences(currentDate, dFirst) <= 100) continue;
					double[] currentFreq = Calculations.arrayListToDouble(arrFreq);
					double mean = Calculations.calculateMean(arrFreq);
					double standardDeviation = Calculations.calculateVariance(arrFreq, mean);
					double zScore = Calculations.calculateZscore(currentFreq[currentFreq.length - 1], mean, standardDeviation);
					arrZscore.add(zScore);
					Put zscorePut = new Put(keyValue.getRow());
					zscorePut.add(Bytes.toBytes("date"), keyValue.getQualifier(), Bytes.toBytes(String.valueOf(zScore)));
					zscoreAllEvent.put(zscorePut);
				}
			}
		} finally {
			// BUG FIX: scanner never closed; closes now survive exceptions
			scanner.close();
			frequencyAllEvent.close();
			zscoreAllEvent.close();
		}
		// BUG FIX: removed the trailing System.exit(-1) debug leftover — it killed
		// the whole JVM (with a failure status) every time this method finished,
		// making the method unusable from any caller. Also removed the per-cell
		// getRealWorldFromStem lookup whose result was discarded.
	}

	/**
	 * Dump the 'trendsPerDate' table to stdout: one line per date row, then one
	 * line per trend qualifier stored under it.
	 * @throws IOException on HBase access failure
	 */
	public static void getTrendsPerDate() throws IOException, ClassNotFoundException, SQLException{
		HTable trendsPerDate = new HTable(config, "trendsPerDate");
		ResultScanner scanner = trendsPerDate.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				// the date is the row key
				System.out.println("----> " + Bytes.toString(res.getRow()));
				for(KeyValue keyValue : res.list()){
					System.out.println(Bytes.toString(keyValue.getQualifier()));
				}
			}
		} finally {
			// BUG FIX: scanner was never closed (also dropped the unused wordFamily local)
			scanner.close();
			trendsPerDate.close();
		}
	}


	/**
	 * For a trend word detected at a given date, gather the topic titles, polls and
	 * opinions that contain it, count word pairs (collocations) among their words,
	 * and store the top collocations in the 'trendsPerDate' table
	 * (row: date, family 'word', qualifier: joined collocations, value: titles).
	 * Table layouts: 'topic' row=topicId family 'date' qualifier=date value=title;
	 * 'polls' row=date; 'opinions' family 'topicId' value=body.
	 * @param Trends the trend word to look for (matched case-insensitively)
	 * @param sDate the date qualifier being analysed
	 * @return true when at least one collocation was found for this trend
	 * @throws IOException on HBase access failure
	 */
	public static boolean printCollocationForATrendInOpinion(HTable tableTopic,HTable tablePolls,HTable searchOpinion,String Trends,String sDate) throws IOException{
		byte[] dateFamily = Bytes.toBytes("date");
		byte[] dateQualifier = Bytes.toBytes(sDate);
		HashSet<String> TopicHash = new HashSet<String>();
		HashSet<String> TopicAndPollTitle = new HashSet<String>();

		/** find trend in topic titles **/
		ResultScanner scanner = tableTopic.getScanner(dateFamily, dateQualifier);
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				String currentTitle = Bytes.toString(res.getValue(dateFamily, dateQualifier));
				if(currentTitle.replaceAll("[?!,]", "").toLowerCase().contains(Trends)){
					TopicAndPollTitle.add(currentTitle);
					TopicHash.add(Bytes.toString(res.getRow()));
				}
			}
		} finally {
			scanner.close(); // BUG FIX: was never closed
		}

		ArrayList<String> coll = new ArrayList<String>();
		Hashtable<String,Integer> collocationFrequent = new Hashtable<String,Integer>();
		Hashtable<String,Integer> singleWordFrequent = new Hashtable<String,Integer>();

		/** find trend in polls titles **/
		Result res = tablePolls.get(new Get(dateQualifier));
		// BUG FIX: res.list() is null when no poll exists at sDate (used to NPE)
		if(res.list() != null){
			for (KeyValue kv : res.list()) {
				String currentTitle = Bytes.toString(kv.getValue());
				if(currentTitle.replaceAll("[?!,]", "").toLowerCase().contains(Trends)){
					TopicAndPollTitle.add(currentTitle);
					collectWords(currentTitle, coll, singleWordFrequent);
				}
			}
		}

		/** collect words from the opinions attached to the trendy topics **/
		for(String topicId : TopicHash){
			ResultScanner scanOpinion = searchOpinion.getScanner(Bytes.toBytes("topicId"), Bytes.toBytes(topicId));
			try {
				// BUG FIX: the old loop advanced with scanner.next() — the *topic*
				// scanner — so it iterated the wrong scanner and never walked the
				// opinions correctly
				for (Result resOpinion = scanOpinion.next(); resOpinion != null; resOpinion = scanOpinion.next()) {
					String body = Bytes.toString(resOpinion.getValue(Bytes.toBytes("topicId"), Bytes.toBytes(topicId)));
					collectWords(body, coll, singleWordFrequent);
				}
			} finally {
				scanOpinion.close();
			}
		}

		/** count collocations within a 5-word window **/
		if(coll.size() > 1){
			for(int i = 0; i < coll.size() - 1; i++){
				// BUG FIX: the old bound Math.min(coll.size(), j + 5) moved with j and
				// degenerated into "pair with every following word"; the bound must be
				// relative to i (up to 5 following words)
				for(int j = i + 1; j < Math.min(coll.size(), i + 6); j++){
					if(coll.get(i).equals(coll.get(j))) continue;
					utils.primitif.HashUtils.putToHashAndIncrement(collocationFrequent, StringUtils.concatTwoWord(coll.get(i), coll.get(j)));
				}
			}
		}

		String s = buildTopCollocationString(collocationFrequent);
		if(collocationFrequent.size() > 0 && s.length() > 0){
			HTable trendsPerDate = new HTable(config, "trendsPerDate");
			try {
				StringBuilder sbTitle = new StringBuilder();
				for(String sTitle : TopicAndPollTitle){
					sbTitle.append(sTitle);
					sbTitle.append("\n");
				}
				Put putTrends = new Put(dateQualifier);
				putTrends.add(Bytes.toBytes("word"), Bytes.toBytes(s), Bytes.toBytes(sbTitle.toString()));
				trendsPerDate.put(putTrends);
			} finally {
				trendsPerDate.close();
			}
		}
		return collocationFrequent.size() > 0;
	}

	/** Tokenize a title/body, drop stop words, append the tokens to coll and count them. */
	private static void collectWords(String text, ArrayList<String> coll, Hashtable<String,Integer> singleWordFrequent){
		String cleaned = StringUtils.removeBadCharButKeepSpace(text).toLowerCase();
		for(String word : cleaned.split(" ")){
			if(Tools.isStopWord(word)) continue;
			coll.add(word);
			utils.primitif.HashUtils.putToHashAndIncrement(singleWordFrequent, word);
		}
	}

	/**
	 * Pick up to three of the most frequent collocations (count > 1) joined by " - ";
	 * when none is frequent, fall back to the single best candidate.
	 */
	private static String buildTopCollocationString(Hashtable<String,Integer> collocationFrequent){
		if(collocationFrequent.size() == 0)
			return "";
		ArrayList<Map.Entry<?, Integer>> collW = HashUtils.sortValue(collocationFrequent);
		if(collW.size() == 0)
			return "";
		StringBuilder sb = new StringBuilder();
		int i = 0;
		for(Map.Entry<?, Integer> map : collW){
			if(map.getValue() > 1){
				i++;
				if(collW.size() > 2){
					if(i <= 3){
						sb.append(map.getKey());
						sb.append(" - ");
					}else
						break;
				}else{
					sb.append(map.getKey());
					sb.append(" - ");
					break;
				}
			}else{
				// no frequent pair: keep only the single best candidate
				sb.append(map.getKey());
				break;
			}
		}
		String s = sb.toString();
		if(s.endsWith(" - ")){
			s = s.substring(0, s.length() - 3);
		}
		return s;
	}

	/**
	 * For every trend stored per date, pick the collocation whose combined z-score
	 * (sum of the z-scores of its two words at that date) is highest, and store it
	 * in 'trendsFinalCollocations'.
	 * @throws IOException on HBase access failure
	 */
	public static void findOnlyOneCollocationForTrendyTopic() throws IOException, ClassNotFoundException, SQLException{
		HTable zscoreAllEvent = new HTable(config, "zscoreAllEvent");
		HTable trendAllEvent = new HTable(config, "trendsPerDate");
		HTable trendsFinalCollocation = new HTable(config, "trendsFinalCollocations");
		ResultScanner scanner = trendAllEvent.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				for(KeyValue kv : res.list()){
					String date = Bytes.toString(kv.getRow());
					// qualifiers look like "w1 w2 - w3 w4 - ..." (joined with " - ")
					String[] col = Bytes.toString(kv.getQualifier()).split("-");
					double max = 0;
					int iMax = 0;
					boolean first = true;
					// BUG FIX: the old code incremented its counter only for the very
					// first two-word entry, so iMax could never point past index 1 and
					// mismatched the col[] indices when some entries were not two words;
					// track the real array index instead.
					for(int i = 0; i < col.length; i++){
						String[] subCol = col[i].trim().split(" ");
						if(subCol.length != 2)
							continue;
						double z = getZscoreValueForCollocation(zscoreAllEvent, subCol[0], subCol[1], date);
						if(first || z > max){
							max = z;
							iMax = i;
							first = false;
						}
					}
					System.out.println(col[iMax]);
					trendsFinalCollocation.put(new Put(kv.getRow()).add(Bytes.toBytes("trend"),
							Bytes.toBytes(col[iMax]), kv.getValue()));
				}
			}
		} finally {
			// BUG FIX: scanner, zscoreAllEvent and trendAllEvent were never closed
			scanner.close();
			trendsFinalCollocation.close();
			trendAllEvent.close();
			zscoreAllEvent.close();
		}
	}
	
	/**
	 * Sum the z-scores of the two words of a collocation at a given date.
	 * A word with no z-score entry at that date contributes 0.
	 * @param zscoreAllEvent table: row = word, family 'date', qualifier = date, value = z-score string
	 * @param a first word of the collocation
	 * @param b second word of the collocation
	 * @param date the date qualifier to read
	 * @return z(a) + z(b) at that date
	 * @throws IOException on HBase access failure
	 */
	public static double getZscoreValueForCollocation(HTable zscoreAllEvent, String a, String b, String date) throws IOException, ClassNotFoundException, SQLException{
		byte[] dateFamily = Bytes.toBytes("date");
		byte[] dateQualifier = Bytes.toBytes(date);
		double za = 0;
		double zb = 0;
		byte[] res = zscoreAllEvent.get(new Get(Bytes.toBytes(a))).getValue(dateFamily, dateQualifier);
		if(res != null)
			za = Double.parseDouble(Bytes.toString(res));
		res = zscoreAllEvent.get(new Get(Bytes.toBytes(b))).getValue(dateFamily, dateQualifier);
		if(res != null)
			zb = Double.parseDouble(Bytes.toString(res));
		// debug printlns removed: they flooded stdout once per collocation lookup
		return za + zb;
	}
}