package hbase.classification.old;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.NavigableMap;
import java.util.Map.Entry;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

import stemming.tools.Tools;

public class classificationOld {
	/**
	 * Scans {@code tableTopic} for titles dated {@code sDate} that contain the
	 * trend word, builds for each matching title a small context string (the
	 * non-stop word before the trend, the trend itself, and the non-stop words
	 * after it), and prints the collected contexts when the trend occurred more
	 * than once.
	 *
	 * @param tableTopic HBase table whose "date" family holds titles per date
	 * @param Trends     trend word to look for (expected lower-case; matching is
	 *                   done on lower-cased titles)
	 * @param sDate      date string used both as column qualifier and scan filter
	 * @throws IOException on any HBase access failure
	 */
	public static void printColloctionForATrendSingleWordAtDate(HTable tableTopic,String Trends,String sDate) throws IOException{
		StringBuffer buff = new StringBuffer();
		byte[] dateFamily = Bytes.toBytes("date");
		ResultScanner scanner = tableTopic.getScanner(dateFamily, Bytes.toBytes(sDate));
		int howManyFound = 0;
		HashSet<String> results = new HashSet<String>();
		int iT = 0;
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				String currentTitle = Bytes.toString(res.getValue(dateFamily, Bytes.toBytes(sDate)));
				// Guard: rows without the expected cell yield null (Bytes.toString(null)
				// returns null) and would NPE on replaceAll in the original.
				if (currentTitle == null
						|| !currentTitle.replaceAll("[?!,]", "").toLowerCase().contains(Trends)) {
					continue;
				}
				String[] splitTitle = currentTitle.split(" ");
				boolean find = false;
				ArrayList<String> arrNonStopWord = new ArrayList<String>();
				for (int i = 0; i < splitTitle.length; i++) {
					String cleanCurrent = utils.primitif.StringUtils.removeBadChar(splitTitle[i]);
					if (Tools.isStopWord(cleanCurrent)) {
						continue;
					}
					if (cleanCurrent.toLowerCase().contains(Trends)) {
						find = true;
						howManyFound++;
						// Emit the non-stop word immediately preceding the trend, if any.
						if (!arrNonStopWord.isEmpty()) {
							buff.append(utils.primitif.StringUtils.removeBadChar(
									arrNonStopWord.get(arrNonStopWord.size() - 1)));
							buff.append(" ");
						}
						// Trend is the last token: no later iteration will append it.
						if (i == splitTitle.length - 1) {
							buff.append(Trends);
						}
					} else {
						arrNonStopWord.add(cleanCurrent);
						if (find) {
							// First non-stop word after the trend: prefix the trend once.
							if (iT++ == 0) {
								buff.append(Trends);
							}
							buff.append(" ");
							buff.append(cleanCurrent);
						}
					}
				}
				results.add(buff.toString());
				iT = 0;
				buff = new StringBuffer();
			}
		} finally {
			scanner.close(); // the original leaked this scanner
		}
		if (howManyFound > 1) {
			System.out.println(results);
			System.out.println(Trends);
			System.out.println("--------");
		}
	}
	
	/**
	 * Copies every trend row of "trendsAllEvent" into "trendsPerDate", keyed by
	 * the date qualifier, after mapping each stemmed token of the trend back to
	 * its original word via the "stem" table.
	 *
	 * @throws IOException            on any HBase access failure
	 * @throws ClassNotFoundException declared for caller compatibility (not thrown here)
	 * @throws SQLException           declared for caller compatibility (not thrown here)
	 */
	public static void storeTrendsPerDate() throws IOException, ClassNotFoundException, SQLException{
		@SuppressWarnings("deprecation")
		HBaseConfiguration config = new HBaseConfiguration();
		HTable trendAllEvent = new HTable(config, "trendsAllEvent");
		HTable trendsPerDate = new HTable(config, "trendsPerDate");
		HTable tableStem = new HTable(config, "stem");
		byte[] wordFamily = Bytes.toBytes("word");
		byte[] stemFamily = Bytes.toBytes("stem");
		ResultScanner scanner = trendAllEvent.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				// The current trend is the row key; each qualifier is a date.
				for (KeyValue keyValue : res.list()) {
					String thisValue = Bytes.toString(keyValue.getRow());
					String[] thisSplit = thisValue.split(" ");
					if (thisSplit.length > 1) {
						// Two-word trend: un-stem each half independently.
						thisValue = lookupOriginalWord(tableStem, stemFamily, thisSplit[0])
								+ " " + lookupOriginalWord(tableStem, stemFamily, thisSplit[1]);
					} else {
						thisValue = lookupOriginalWord(tableStem, stemFamily, thisValue);
					}
					trendsPerDate.put(new Put(keyValue.getQualifier()).add(wordFamily, Bytes.toBytes(thisValue), keyValue.getValue()));
				}
			}
		} finally {
			// Close everything; the original leaked the scanner and tableStem.
			scanner.close();
			trendAllEvent.close();
			trendsPerDate.close();
			tableStem.close();
		}
	}

	/**
	 * Returns the original (un-stemmed) word stored for {@code stem} in the stem
	 * table, or {@code stem} itself when no mapping exists.
	 */
	private static String lookupOriginalWord(HTable tableStem, byte[] stemFamily, String stem) throws IOException {
		byte[] word = tableStem.get(new Get(Bytes.toBytes(stem))).getValue(stemFamily, Bytes.toBytes(stem));
		return word == null ? stem : Bytes.toString(word);
	}
	
	/**
	 * For every trend in "trendsAllEvent", scans "frequencyAllEvent" at the
	 * trend's date for multi-word rows containing the trend, un-stems both words
	 * of each matching collocation, stores it in "trendsAllEventCollocation" and
	 * prints it.
	 *
	 * @throws IOException            on any HBase access failure
	 * @throws ClassNotFoundException declared for caller compatibility (not thrown here)
	 * @throws SQLException           declared for caller compatibility (not thrown here)
	 */
	public static void mutualInformationBetweenCollocationAndTrends() throws IOException, ClassNotFoundException, SQLException{
		@SuppressWarnings("deprecation")
		HBaseConfiguration config = new HBaseConfiguration();
		HTable trendAllEvent = new HTable(config, "trendsAllEvent");
		HTable trendAllEventCollocation = new HTable(config, "trendsAllEventCollocation");
		HTable frequencyAllEvent = new HTable(config, "frequencyAllEvent");
		HTable tableStem = new HTable(config, "stem");
		byte[] dateFamily = Bytes.toBytes("date");
		byte[] stemFamily = Bytes.toBytes("stem");
		ResultScanner scanner = trendAllEvent.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				// The current trend is the row key; each qualifier is a date.
				for (KeyValue keyValue : res.list()) {
					String currentStringValue = Bytes.toString(keyValue.getRow());
					ResultScanner frequencyAtDate = frequencyAllEvent.getScanner(dateFamily, keyValue.getQualifier());
					try {
						for (Result resFr = frequencyAtDate.next(); resFr != null; resFr = frequencyAtDate.next()) {
							String currentFr = Bytes.toString(resFr.getRow());
							String[] collocSplit = currentFr.split(" ");
							// Only multi-word rows containing the trend are collocations of interest.
							if (collocSplit.length > 1 && currentFr.contains(currentStringValue)) {
								byte[] realWord1 = tableStem.get(new Get(Bytes.toBytes(collocSplit[0]))).getValue(stemFamily, Bytes.toBytes(collocSplit[0]));
								byte[] realWord2 = tableStem.get(new Get(Bytes.toBytes(collocSplit[1]))).getValue(stemFamily, Bytes.toBytes(collocSplit[1]));
								// Fall back to the stemmed token when no original word is stored.
								String realWordS = realWord1 == null ? collocSplit[0] : Bytes.toString(realWord1);
								String realWordS2 = realWord2 == null ? collocSplit[1] : Bytes.toString(realWord2);
								String finalCollocation = realWordS + " " + realWordS2;
								trendAllEventCollocation.put(new Put(Bytes.toBytes(finalCollocation)).add(dateFamily, keyValue.getQualifier(), resFr.value()));
								System.out.println(finalCollocation + " | DATE: " + Bytes.toString(keyValue.getQualifier()));
							}
						}
					} finally {
						// The original leaked one scanner per key/value.
						frequencyAtDate.close();
					}
				}
			}
		} finally {
			scanner.close();
			trendAllEvent.close();
			frequencyAllEvent.close();
			tableStem.close();
			trendAllEventCollocation.close();
		}
	}
	
	/**
	 * Collects every collocation (ordered pair of non-stop words at most
	 * {@code window} tokens apart) from {@code s} and maps it to the distance
	 * between the two words.
	 *
	 * @param s      the sentence to analyse, tokens separated by single spaces
	 * @param window maximum token distance between the two words of a pair
	 * @return a table mapping "stem1 stem2" (lower-cased stems) to the token
	 *         distance {@code j - i}; later pairs overwrite earlier duplicates
	 */
	public static Hashtable<String,Integer> getCollocation(String s, int window){
		Hashtable<String,Integer> collocations = new Hashtable<String,Integer>();
		// Memoises the cleaned, stemmed, lower-cased form of each raw token.
		Hashtable<String,String> stemString = new Hashtable<String, String>();
		String[] sArray = s.split(" ");
		for(int i = 0; i < sArray.length; i++){
			int currentWindow = window;
			if((sArray.length - i) < window){
				// Shrink the window near the end of the sentence.
				currentWindow = sArray.length - i;
				if(currentWindow < 2){
					break; // fewer than two tokens left: no more pairs possible
				}
			}
			if(Tools.isStopWord(sArray[i]))
				continue;
			if(!stemString.containsKey(sArray[i])){
				stemString.put(sArray[i],Tools.stem(utils.primitif.StringUtils.removeBadChar(sArray[i])).toLowerCase());
			}
			// BUGFIX: the window is relative to i (as the shrink logic above shows);
			// the original compared j against currentWindow alone, so no pairs were
			// ever produced once i >= window - 1.
			for(int j = i + 1; j < i + currentWindow; j++){
				if(Tools.isStopWord(sArray[j]))
					continue;
				if(!stemString.containsKey(sArray[j])){
					stemString.put(sArray[j],Tools.stem(utils.primitif.StringUtils.removeBadChar(sArray[j])).toLowerCase());
				}
				collocations.put((stemString.get(sArray[i]).concat(" " + stemString.get(sArray[j]))),j-i);
			}
		}
		return collocations;
	}
	
	/**
	 * For each row of "docWeight", sums every value in its "doc" family and
	 * writes the total into "docDate" under the same row key, qualified by the
	 * number of cells in the row; also prints a per-row summary.
	 *
	 * @throws IOException on any HBase access failure
	 */
	@SuppressWarnings("deprecation")
	public static void classifyNumberDocPerDate() throws IOException{
		HBaseConfiguration config = new HBaseConfiguration();
		HTable tableDoc = new HTable(config, "docWeight");
		HTable tableDocDate = new HTable(config, "docDate");
		byte[] docFamily = Bytes.toBytes("nbdoc");
		ResultScanner scanner = tableDoc.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				byte[] key = res.getRow();
				// Sum every stored weight in the "doc" family of this row.
				// (Declared per-iteration instead of the original's manual reset.)
				int sum = 0;
				NavigableMap<byte[], byte[]> mapNav = res.getFamilyMap(Bytes.toBytes("doc"));
				for (Entry<byte[], byte[]> columnFamilyEntry : mapNav.entrySet()) {
					sum += Integer.parseInt(Bytes.toString(columnFamilyEntry.getValue()));
				}
				Put docPut = new Put(key);
				docPut.add(docFamily, Bytes.toBytes(String.valueOf(res.size())), Bytes.toBytes(String.valueOf(sum)));
				tableDocDate.put(docPut);
				System.out.println(Bytes.toString(key) + ":" + res.size() + " -> value: " + sum);
			}
		} finally {
			// The original leaked the scanner and closed tables only on success.
			scanner.close();
			tableDocDate.close();
			tableDoc.close();
		}
	}

	/**
	 * For every word row of "wordFreq" and every date column, deserializes the
	 * list of document ids stored in the cell, combines the share of documents
	 * containing the word with the share of total votes it received, and stores
	 * the resulting frequency score in the "frequency" table.
	 *
	 * @throws IOException            on HBase access or deserialization failure
	 * @throws ClassNotFoundException if the serialized cell payload cannot be resolved
	 */
	public static void classifyfrequency() throws IOException, ClassNotFoundException{
		@SuppressWarnings("deprecation")
		HBaseConfiguration config = new HBaseConfiguration();
		HTable tableDoc = new HTable(config, "docDate");
		HTable tableWord = new HTable(config, "wordFreq");
		HTable tableDocWeight = new HTable(config, "docWeight");
		HTable tableFrequency = new HTable(config, "frequency");

		byte[] dateFamily = Bytes.toBytes("date");
		// Hoisted out of the loops: these family names never change.
		byte[] docFamily = Bytes.toBytes("doc");
		byte[] nbDocFamily = Bytes.toBytes("nbdoc");

		int cpt = 0;
		ResultScanner scanner = tableWord.getScanner(new Scan());
		try {
			for (Result res = scanner.next(); res != null; res = scanner.next()) {
				byte[] key = res.getRow();
				System.out.println(Bytes.toString(key));
				NavigableMap<byte[], byte[]> mapNav = res.getFamilyMap(dateFamily);
				for (Entry<byte[], byte[]> columnFamilyEntry : mapNav.entrySet()) {
					byte[] dateV = columnFamilyEntry.getKey();
					// The cell value is a Java-serialized ArrayList<Integer> of doc ids.
					// NOTE(review): native deserialization is unsafe on untrusted data;
					// acceptable only because these cells are written by this project.
					ArrayList<Integer> doc;
					ObjectInputStream obj_in = new ObjectInputStream(new ByteArrayInputStream(columnFamilyEntry.getValue()));
					try {
						@SuppressWarnings("unchecked")
						ArrayList<Integer> read = (ArrayList<Integer>) obj_in.readObject();
						doc = read;
					} finally {
						obj_in.close(); // the original never closed the stream
					}
					// One Get per date instead of the original's one Get per doc id.
					Result docWeightRow = tableDocWeight.get(new Get(dateV));
					int sumVote = 0;
					for (Integer i : doc) {
						byte[] result = docWeightRow.getValue(docFamily, Bytes.toBytes(i.toString()));
						if (result != null)
							sumVote += Integer.parseInt(Bytes.toString(result));
					}
					Result result2 = tableDoc.get(new Get(dateV));
					NavigableMap<byte[], byte[]> mapNavNbDocCount = result2.getFamilyMap(nbDocFamily);
					double frequency = 0;
					for (Entry<byte[], byte[]> columnFamilyDocument : mapNavNbDocCount.entrySet()) {
						// Qualifier holds the document count, value the total vote count
						// (as written by classifyNumberDocPerDate).
						int nbDoc = Integer.parseInt(Bytes.toString(columnFamilyDocument.getKey()));
						int nbTotalVote = Integer.parseInt(Bytes.toString(columnFamilyDocument.getValue()));
						System.out.println(nbDoc);
						System.out.println(nbTotalVote);
						frequency = (doc.size() / (double) nbDoc) + (sumVote / (double) nbTotalVote);
					}
					Put docPut = new Put(key);
					docPut.add(dateFamily, dateV, Bytes.toBytes(String.valueOf(frequency)));
					tableFrequency.put(docPut);
					System.out.println(++cpt);
				}
			}
		} finally {
			// The original leaked the scanner and closed tables only on success.
			scanner.close();
			tableWord.close();
			tableDoc.close();
			tableFrequency.close();
			tableDocWeight.close();
		}
	}
}
