/** Naive NN-ZDD with Hashing
 * 
 * 
 * @author Yong Boon, Lim (yongboon.lim@gmail.com)
 */
package core;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import search.FileIndexBuilderRaw;
import search.SimpleSearchRanker;
import util.DocUtils;
import util.FileFinder;

public class NNZDDHash extends NNZDDBasic {
	// Number of low-order bits of each token's hashCode() kept as its hashed feature.
	int _hashBits;
	// Lucene index builder used to store documents keyed by their hashed features.
	// NOTE(review): created in build(); null until then and after clear().
	FileIndexBuilderRaw _fileIndexBuilderRaw;

	/**
	 * Nearest-neighbor variant that hashes document tokens before inserting them
	 * into the ZDD, and uses Lucene to retrieve candidate documents.
	 *
	 * @param trainDataDir directory containing the training documents
	 * @param hashBits     number of low-order hash bits to keep per token
	 */
	public NNZDDHash(String trainDataDir, int hashBits) {
		super(trainDataDir);
		_hashBits = hashBits;
		_nnDescShort = "NN-ZDD-HASH-LUC-" + Integer.toString(hashBits);
		_nnDescLong = "Nearest Neighbor with ZDD Hashing (Using Lucene to retrieve doc)";		
	}

	/**
	 * Converts one document's set of hashed feature strings into a ZDD.
	 * Each unseen feature is assigned a fresh ZDD variable (recorded in
	 * {@code _stemVarDict}), then folded into the topic ZDD via changeWith.
	 *
	 * @param hashFuncValue hashed feature strings for a single document
	 * @return ZDD node representing the document as a single itemset
	 */
	public int topic2Zdd(HashSet<String> hashFuncValue) {
		int zddTopic = _zdd.base();
		for (String s : hashFuncValue) {
			if (!_stemVarDict.containsKey(s)) {
				int zddVar = _zdd.createVar();
				_stemVarDict.put(s, zddVar);
			}
			zddTopic = changeWith(zddTopic, _stemVarDict.get(s));
		}
		return zddTopic;		
	}

	/**
	 * Hashes a string by taking the n low-order bits of its hashCode().
	 * For {@code bits >= 32} the mask is all ones, so the full (possibly
	 * negative) hashCode is returned; for smaller widths the result is
	 * non-negative.
	 *
	 * @param corpus string to hash
	 * @param bits   number of low-order bits to keep
	 * @return masked hash value
	 */
	private static int hashKey(String corpus, int bits) {
		int hashStr = corpus.hashCode();
		// (1 << bits) is undefined for bits >= 32 in Java (shift count wraps),
		// so the full mask is handled as an explicit special case.
		int bitmask;
		if (bits < 32) {
			bitmask = (1 << bits) - 1;
		} else {
			bitmask = 0xFFFFFFFF;
		}
		return hashStr & bitmask;
	}

	/**
	 * Hashes every key of a feature map into a set of decimal strings.
	 * Collisions collapse into one element; feature weights (values) are ignored.
	 *
	 * @param featureMap token -> weight map (keys assumed to be Strings)
	 * @param hashBits   number of low-order hash bits to keep per token
	 * @return set of hashed feature strings
	 */
	private static HashSet<String> hashSet(Map<Object, Double> featureMap, int hashBits) {
		HashSet<String> resultSet = new HashSet<String>();
		for (Map.Entry<Object, Double> me : featureMap.entrySet()) {
			resultSet.add(Integer.toString(hashKey((String) me.getKey(), hashBits)));
		}
		return resultSet;
	}

	/**
	 * Reads a file, extracts its filtered feature map, and returns the
	 * hashed feature set for this instance's {@code _hashBits}.
	 *
	 * @param f document file to read
	 * @return hashed feature strings of the document
	 */
	protected HashSet<String> file2Hash(File f) {
		String fileContent = DocUtils.ReadFile(f);
		Map<Object, Double> wordCount = DocUtils.ConvertToFeatureMapFilter(fileContent);
		// FIX: dropped an unused HashMap<Integer, Integer> local that was
		// allocated here and never read.
		return hashSet(wordCount, _hashBits);
	}

	/**
	 * Scores the target file against the ZDD rooted at {@code zdd}, resetting
	 * the per-query branch-choice map first.
	 *
	 * @param zdd            root ZDD node
	 * @param targetFilename path of the query document
	 * @return maximum overlap score found by reduceScore
	 */
	@Override
	protected double reduceScoreMain(int zdd, String targetFilename) {
		File targetFile = new File(targetFilename);
		HashSet<String> hashFuncValue = file2Hash(targetFile);
		_nodeMaxBranchDict = new HashMap<Integer, Boolean>();
		return reduceScore(zdd, hashFuncValue);
	}

	/**
	 * Recursively finds the best-scoring path through the ZDD: each HIGH edge
	 * whose variable appears in the target's hashed feature set contributes 1.
	 * Records per-node whether the HIGH branch won in {@code _nodeMaxBranchDict}
	 * (used later by extractSet to reconstruct the winning itemset).
	 *
	 * @param zdd                 current ZDD node (0 = empty set, 1 = base)
	 * @param hashFuncValueTarget hashed features of the query document
	 * @return best score reachable from this node
	 */
	public double reduceScore(int zdd, HashSet<String> hashFuncValueTarget) {
		// Terminal nodes use the costs configured on the superclass.
		if (zdd == 0)
			return _zddEmptyCost;
		if (zdd == 1)
			return _zddBaseCost;

		int zddLow = _zdd.subset0(zdd, _zdd.getVar(zdd));
		int zddHigh = _zdd.subset1(zdd, _zdd.getVar(zdd));

		// 1 if this node's variable is a feature of the target, else 0.
		int zddContain = (hashFuncValueTarget.contains(_stemVarInvDict.get(_zdd.getVar(zdd)))) ? 1 : 0;

		double zddHighCost = reduceScore(zddHigh, hashFuncValueTarget) + zddContain;
		double zddLowCost = reduceScore(zddLow, hashFuncValueTarget);

		double zddCost = 0;
		boolean maxBranch;

		if (zddHighCost > zddLowCost) {
			zddCost = zddHighCost;
			maxBranch = true;
		} else {
			zddCost = zddLowCost;
			maxBranch = false;
		}
		_nodeMaxBranchDict.put(zdd, maxBranch);

		return zddCost;
	}

	/**
	 * Builds the model: hashes every training file, unions the resulting topic
	 * ZDDs into {@code _zddRoot}, and indexes each file's hashed features in
	 * Lucene for later retrieval.
	 */
	@Override
	public void build() {
		try {
			ArrayList<File> files = FileFinder.GetAllFiles(_trainDataDir, "", true);
			int zddTopic;
			int zddTmp = _zdd.base();

			_fileIndexBuilderRaw = new FileIndexBuilderRaw(_indexPath);
			int numFileProcessed = 0;

			for (File f : files) {
				HashSet<String> hashFuncValue = file2Hash(f);

				// Union this document's itemset into the accumulated ZDD.
				zddTopic = topic2Zdd(hashFuncValue);
				zddTmp = unionWith(zddTmp, zddTopic);

				// Index the hashed features in Lucene under the file's path.
				_fileIndexBuilderRaw.addFile(f.getPath(), hashFuncValue);

				numFileProcessed++;
				if (numFileProcessed % 1000 == 0) {
					System.out.println("File Processed: " + numFileProcessed);
				}
			}
			_zddRoot = zddTmp;
		} catch (IOException e) {
			e.printStackTrace(System.err);
		} finally {
			// FIX: guard against NPE when the index builder was never created
			// (e.g. FileFinder or the FileIndexBuilderRaw constructor threw).
			if (_fileIndexBuilderRaw != null) {
				_fileIndexBuilderRaw.close();
			}
		}
	}

	/**
	 * Queries the model with a document: scores it against the ZDD, extracts
	 * the winning feature set, and runs a Lucene search for the best-matching
	 * indexed document.
	 *
	 * @param targetDataName path of the query document
	 * @return ranked search result string, or "" on failure
	 */
	@Override
	public String query(String targetDataName) {
		double sim = reduceScoreMain(_zddRoot, targetDataName);
		Set<String> setExtract = new HashSet<String>();
		extractSet(_zddRoot, setExtract);

		if (DEBUG) {
			System.out.println(set2String(setExtract));
		}

		try {
			_luceneSearch = new SimpleSearchRanker(_indexPath, default_field, _fileIndexBuilderRaw._analyzer);
			return _luceneSearch.doSearch(set2String(setExtract));
		} catch (Exception e) {
			e.printStackTrace();
		}
		return "";
	}

	/** Releases superclass state and drops the Lucene index builder reference. */
	@Override
	public void clear() {
		super.clear();
		_fileIndexBuilderRaw = null;
	}

	/**
	 * Demo entry point: builds the model over a fixed test directory.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
//		String trainDataDir = "data/more_newsgroups/alt.atheism";
//		String trainDataDir = "data/test_data";
//		String targetFile = trainDataDir + "/00000.txt";
		String trainDataDir = "data/test_data2";
		String targetFile = trainDataDir + "/57110";
		NNZDDBasic nnZDD = new NNZDDHash(trainDataDir, 10);
//		nnZDD.DEBUG = true;
		nnZDD.init();
		nnZDD.build();
//		nnZDD.query(targetFile);
//		System.out.println(nnZDD.set2String(nnZDD.extractSet()));
	}

}
