package apriorialgo;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class CKItemsetReducer extends
		Reducer<Text, IntWritable, Text, IntWritable> {
	// Current itemset size (k) being counted; read from "itemset.k".
	private int k = 0;
	// Minimum support threshold, as a fraction of the transaction count.
	private double minSup = -1.0;
	// Total number of transactions in the database; -1 marks "not configured".
	private int noTransaction = -1;
	// Full transaction database, loaded from the DistributedCache in setup().
	private ArrayList<ArrayList<String>> transactions = new ArrayList<ArrayList<String>>();

	/**
	 * Reads the job parameters (itemset size, transaction count, minimum
	 * support) and loads the whole transaction database from the first
	 * DistributedCache file — one whitespace-separated transaction per line —
	 * so that reduce() can recount candidate support against it.
	 */
	@Override
	protected void setup(Context context) throws IOException,
			InterruptedException {
		super.setup(context);
		Configuration conf = context.getConfiguration();
		k = conf.getInt("itemset.k", 0);
		System.out.println("Enter CKItemsetReducer.set k = " + k);
		noTransaction = conf.getInt("no.trans", -1);
		System.out.println("CKItemsetReducer.noTransaction==" + noTransaction);
		String minSupport = conf.get("min.support", "-1.0");
		System.out.println("CKItemsetReducer.minSupport=" + minSupport);
		minSup = Double.parseDouble(minSupport);
		try {
			Path[] cacheFiles = DistributedCache.getLocalCacheFiles(conf);
			if (cacheFiles != null && cacheFiles.length > 0) {
				System.out.println("cacheFiles size: " + cacheFiles.length);
				for (Path p : cacheFiles) {
					System.out.println("Cache file: " + p.toUri().toString());
				}
				System.out.println("reading cache file: " + cacheFiles[0].toUri().toString());
				// First cache file is the original transaction database.
				BufferedReader bufReader = new BufferedReader(new FileReader(
						cacheFiles[0].toString()));
				try {
					String line;
					while ((line = bufReader.readLine()) != null) {
						StringTokenizer tokenizer = new StringTokenizer(line);
						ArrayList<String> transaction = new ArrayList<String>();
						while (tokenizer.hasMoreTokens()) {
							transaction.add(tokenizer.nextToken().trim());
						}
						transactions.add(transaction);
					}
				} finally {
					bufReader.close();
				}
			}
		} catch (IOException e) {
			// Best-effort: an unreadable cache leaves the transaction table
			// empty, so every candidate counts zero support downstream.
			System.err.println("Exception reading DistributedCache: " + e);
		}
		System.out.println("Exit.  Transaction table size: "
				+ transactions.size());
	}

	/**
	 * Counts the true support of the candidate itemset in {@code key} by
	 * scanning the cached transaction database — the incoming {@code values}
	 * are intentionally ignored — and emits the itemset with its count when
	 * its relative support reaches the configured minimum.
	 */
	@Override
	public void reduce(Text key, Iterable<IntWritable> values, Context context)
			throws IOException, InterruptedException {
		System.out.println("Enter CKItemsetReducer.set k = " + k);
		// Tokenize the candidate itemset once, not once per transaction.
		ArrayList<String> items = new ArrayList<String>();
		StringTokenizer tokenizer = new StringTokenizer(key.toString());
		while (tokenizer.hasMoreTokens()) {
			items.add(tokenizer.nextToken().trim());
		}
		int sum = 0;
		for (ArrayList<String> transaction : transactions) {
			// A transaction supports the candidate only if it contains every
			// item of the itemset; an empty candidate supports nothing.
			if (!items.isEmpty() && transaction.containsAll(items)) {
				sum++;
			}
		}
		// Guard against a missing/invalid "no.trans" setting (defaults to -1),
		// which would otherwise yield a meaningless negative support ratio.
		if (noTransaction > 0 && (sum / (double) noTransaction) >= minSup) {
			// InterruptedException propagates per the Reducer.reduce contract
			// instead of being swallowed with printStackTrace().
			context.write(key, new IntWritable(sum));
		}
	}
}
