package apriorialgo;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

/**
 * Driver for a multi-pass MapReduce implementation of the Apriori
 * frequent-itemset algorithm.
 *
 * <p>Pass 1 ({@code C1ItemsetMapper}/{@code C1ItemsetReducer}) counts single
 * items; each later pass k ({@code CKItemsetMapper}/{@code CKItemsetReducer})
 * generates C(k) candidates from the L(k-1) frequent itemsets produced by the
 * previous pass and counts their support. Passes continue until the previous
 * frequent-itemset file is empty or the user-supplied pass limit is reached.
 *
 * <p>Usage:
 * {@code bin/hadoop jar <jar> apriorialgo.AprioriDriver <input file> <output folder> minSupport [noPasses]}
 */
public class AprioriDriver {

	/** Total number of transactions (lines) in the raw input file; shared with every pass via "no.trans". */
	private static int noOfTransaction = 0;

	/**
	 * Counts the number of lines in a file stored on the given file system.
	 *
	 * @param fs     the (HDFS or local) file system holding the file
	 * @param inFile path of the file to scan
	 * @return the number of lines in the file
	 * @throws IOException if the file cannot be opened or read
	 */
	private static int getLineCount(FileSystem fs, Path inFile) throws IOException {
		int noLine = 0;
		BufferedReader bufferReader = new BufferedReader(new InputStreamReader(fs.open(inFile)));
		// finally guarantees the reader (and the underlying FSDataInputStream)
		// is closed even when readLine() throws — the original leaked it on error
		try {
			while (bufferReader.readLine() != null) {
				noLine++;
			}
		} finally {
			bufferReader.close();
		}
		return noLine;
	}

	/**
	 * Entry point. Expects at least three arguments: input file, output folder
	 * prefix (the pass number k is appended to it), and the minimum support;
	 * an optional fourth argument limits the number of passes.
	 *
	 * @param args command-line arguments as described above
	 * @throws Exception if job configuration or execution fails
	 */
	public static void main(String[] args) throws Exception {

		if (args.length < 3) {
			System.out.println("Cmd Error: bin/hadoop jar path/<jarname>.jar apriorialgo.AprioriDriver path/<input data file> path/<output folder> minSupport [noPasses]");
			System.exit(1);
		}

		// Map/Reduce applies indefinitely until the frequent itemset becomes empty ...
		int noPasses = Integer.MAX_VALUE;
		// ... or for the number of passes set explicitly by the user
		if (args.length > 3) {
			try {
				noPasses = Integer.parseInt(args[3]);
			}
			catch (NumberFormatException ex) {
				// don't swallow the bad argument silently — report the fallback
				System.out.println("Warning: noPasses argument '" + args[3]
						+ "' is not a number; running until the frequent itemset is empty.");
			}
		}


		// Pass 1: count single items (C1 candidates -> L1 frequent itemsets)
		int k = 1;
		Configuration conf = new Configuration();
		System.out.println("Started Job " + k);
		Job job = new Job(conf, "AprioriDriver");
		job.setJarByClass(AprioriDriver.class);
		job.setMapperClass(C1ItemsetMapper.class);
		job.setReducerClass(C1ItemsetReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		FileSystem fs = FileSystem.get(conf);
		Path inFile = new Path(args[0]);
		Path outFile = new Path(args[1] + k + "");
		System.out.println("Input file: " + inFile.toUri().toString());
		System.out.println("Output file: " + outFile.toUri().toString());
		// Fail fast on bad input instead of continuing and letting Hadoop
		// throw a confusing error later (the original only printed a message)
		if (!fs.exists(inFile)) {
			System.out.println("Input file not found: " + inFile.toUri().toString());
			System.exit(1);
		}
		if (!fs.isFile(inFile)) {
			System.out.println("Input should be a file, not dir: " + inFile.toUri().toString());
			System.exit(1);
		}
		if (fs.exists(outFile)) {
			System.out.println("Output already exists. Delete: " + outFile.toUri().toString());
			fs.delete(outFile, true);
		}
		FileInputFormat.addInputPath(job, inFile);
		FileOutputFormat.setOutputPath(job, outFile);

		noOfTransaction = getLineCount(fs, inFile);
		System.out.println("No. Of Transaction: " + noOfTransaction);
		job.getConfiguration().set("no.trans", noOfTransaction + "");
		job.getConfiguration().set("min.support", args[2]);
		job.getConfiguration().set("itemset.k", k + "");
		DistributedCache.addCacheFile(inFile.toUri(), job.getConfiguration());
		// Abort the whole run when a pass fails instead of ignoring the result
		if (!job.waitForCompletion(true)) {
			System.out.println("Job " + k + " failed.");
			System.exit(1);
		}

		// Pass k+1: build C(k) candidates from L(k-1) and count their support
		while (k++ <= noPasses) {
			System.out.println("Started Job " + k);
			job = new Job(conf, "AprioriDriver");
			job.setJarByClass(AprioriDriver.class);
			job.setMapperClass(CKItemsetMapper.class);
			job.setReducerClass(CKItemsetReducer.class);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(IntWritable.class);
			job.setInputFormatClass(TextInputFormat.class);
			job.setOutputFormatClass(TextOutputFormat.class);
			// index0 - raw transaction file
			DistributedCache.addCacheFile(new Path(args[0]).toUri(), job.getConfiguration());
			inFile = new Path(outFile.toUri().toString() + "/part-r-00000");
			outFile = new Path(args[1] + k + "");
			// index1 - output of last map/reduce
			DistributedCache.addCacheFile(inFile.toUri(), job.getConfiguration());
			System.out.println("Input file: " + inFile.toUri().toString());
			System.out.println("Output file: " + outFile.toUri().toString());
			if (fs.exists(inFile)) {
				int prevFreqItemsets = getLineCount(fs, inFile);
				if (prevFreqItemsets < 1) {
					System.out.println("Finished generating candidates.  Previous L(" + (k - 1) + ") frequent itemset file is empty: " + inFile.toUri().toString());
					// empty L(k-1) is the normal Apriori termination condition,
					// so exit with success (the original returned failure code 1)
					System.exit(0);
				}
				else {
					System.out.println("Generate C(" + k + ") candidates from previous L(" + (k - 1) + ") frequent itemsets. Number of itemset: " + prevFreqItemsets);
				}
			}
			else {
				System.out.println("Abort generating candidates.  Previous L(" + (k - 1) + ") frequent itemset file doesn't exist: " + inFile.toUri().toString());
				System.exit(1);
			}
			if (fs.exists(outFile)) {
				System.out.println("Output directory already exists. Delete: " + outFile.toUri().toString());
				fs.delete(outFile, true);
			}
			FileInputFormat.addInputPath(job, inFile);
			FileOutputFormat.setOutputPath(job, outFile);
			job.getConfiguration().set("no.trans", noOfTransaction + "");
			job.getConfiguration().set("min.support", args[2]);
			job.getConfiguration().set("itemset.k", k + "");
			if (!job.waitForCompletion(true)) {
				System.out.println("Job " + k + " failed.");
				System.exit(1);
			}
		}
	}
}
