package regular;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

/**
 * Driver for a two-phase MapReduce graph-query evaluation.
 *
 * <p>Phase 1 ({@code QueryEval}) runs {@code EvalMapper}/{@code EvalReducer} over the
 * input graph and writes intermediate {@code RvecWritable} results. Phase 2
 * ({@code QueryEvalFinal}) consumes phase-1 output via {@code EvalSecondMapper}/
 * {@code EvalSecondReducer} and emits the final boolean answers.
 *
 * <p>Exit status: 0 when both jobs succeed, 1 when either job fails or an
 * exception is thrown — so external schedulers can detect failure.
 */
public class QueryEval {

	/**
	 * When {@code true}, paths come from command-line args and server-side Hadoop
	 * config files are loaded; when {@code false}, hard-coded local dev paths are used.
	 */
	public static boolean isRunOnServer = true;

	/**
	 * Entry point.
	 *
	 * <p>Server mode args: {@code args[0]} = HDFS input path, {@code args[1]} = HDFS
	 * output path (phase-2 output goes to {@code args[1] + "-2"}), {@code args[2]} =
	 * local path of the graph-query automaton file.
	 *
	 * @param args command-line arguments (used only when {@link #isRunOnServer} is true)
	 * @throws IOException declared for compatibility; in practice all exceptions are
	 *         caught, reported, and converted to a non-zero exit status
	 */
	public static void main(String[] args) throws IOException,
			ClassNotFoundException, InterruptedException {
		try {

			long startTime1 = System.currentTimeMillis();

			Configuration conf = new Configuration();

			// Defaults for local development; overridden below in server mode.
			String input = "/user/quyet/input";
			String output = "/user/quyet/output";
			String output2 = "/user/quyet/output-2";

			if (isRunOnServer) {
				conf.addResource(new Path(
						"/usr/local/hadoop-1.1.1-bin/conf/core-site.xml"));
				conf.addResource(new Path(
						"/usr/local/hadoop-1.1.1-bin/conf/hdfs-site.xml"));
				input = args[0];
				output = args[1];
				output2 = output + "-2";
				Initial.LOCAL_GRAPH_QUERY = args[2];
				//Initial.HDFS_GRAPH_QUERY = args[3];
			} else {
				conf.addResource(new Path(
						"/usr/local/hadoop/conf/core-site.xml"));
				conf.addResource(new Path(
						"/usr/local/hadoop/conf/hdfs-site.xml"));
				Initial.LOCAL_GRAPH_QUERY = "/home/quyet/test/query/automaton-3-3-01.txt";
				Initial.HDFS_GRAPH_QUERY = "/user/quyet/automaton/graphquery.txt";
				Initial.PATH_NODE_CACHE = "/user/quyet/cache";
				Initial.PATH_INPUT_NODE_CACHE = "/user/quyet/cache/in-node";
				Initial.PATH_OUTPUT_NODE_CACHE = "/user/quyet/cache/out-node";
			}

			// Distribute the query automaton to the cluster before the jobs start.
			Ultility.cacheGraphQuery(conf);

			// Remove any stale phase-1 output so the job can (re)write it.
			FileSystem hdfs = FileSystem.get(conf);
			if (hdfs.exists(new Path(output))) {
				hdfs.delete(new Path(output), true);
			}

			// ----- Phase 1: evaluate the query over the input graph -----
			Job job = new Job(conf, "QueryEval");
			job.setJarByClass(QueryEval.class);

			job.setMapperClass(EvalMapper.class);
			job.setReducerClass(EvalReducer.class);
			job.setNumReduceTasks(2);

			job.setMapOutputKeyClass(IntWritable.class);
			job.setMapOutputValueClass(RvecWritable.class);

			job.setOutputKeyClass(IntWritable.class);
			// job.setOutputValueClass(BooleanWritable.class);
			job.setOutputValueClass(RvecWritable.class);

			job.setInputFormatClass(GraphFileInputFormat.class);
			GraphFileInputFormat.addInputPath(job, new Path(input));

			FileOutputFormat.setOutputPath(job, new Path(output));
			MultipleOutputs.addNamedOutput(job, "Test", TextOutputFormat.class,	IntWritable.class, RvecWritable.class);

			// Abort (with a non-zero exit status) rather than run phase 2 on
			// missing/partial output if phase 1 failed.
			if (!job.waitForCompletion(true)) {
				System.err.println("Job 1 (QueryEval) failed; aborting.");
				System.exit(1);
			}

			long stopTime1 = System.currentTimeMillis();
			long elapsedTime1 = stopTime1 - startTime1;
			System.out.println("Job 1 time: " + elapsedTime1 / 1000 + " s\n");

			// Default reducer part-files are not consumed by phase 2 (it reads the
			// named/side outputs); delete them so they are not treated as input.
			if (hdfs.exists(new Path(output + "/part-r-00000"))) {
				hdfs.delete(new Path(output + "/part-r-00000"), true);
			}
			if (hdfs.exists(new Path(output + "/part-r-00001"))) {
				hdfs.delete(new Path(output + "/part-r-00001"), true);
			}

			// Remove any stale phase-2 output.
			if (hdfs.exists(new Path(output2))) {
				hdfs.delete(new Path(output2), true);
			}

			long startTime2 = System.currentTimeMillis();

			// ----- Phase 2: aggregate phase-1 results into the final answer -----
			Job job2 = new Job(conf, "QueryEvalFinal");
			job2.setJarByClass(QueryEval.class);

			job2.setMapperClass(EvalSecondMapper.class);
			job2.setReducerClass(EvalSecondReducer.class);

			job2.setMapOutputKeyClass(IntWritable.class);
			job2.setMapOutputValueClass(NodeWritable.class);

			job2.setOutputKeyClass(NullWritable.class);
			job2.setOutputValueClass(BooleanWritable.class);

			job2.setInputFormatClass(GraphFileInputFormat.class);
			GraphFileInputFormat.addInputPath(job2, new Path(output));

			FileOutputFormat.setOutputPath(job2, new Path(output2));

			if (!job2.waitForCompletion(true)) {
				System.err.println("Job 2 (QueryEvalFinal) failed; aborting.");
				System.exit(1);
			}

			long stopTime2 = System.currentTimeMillis();
			long elapsedTime2 = stopTime2 - startTime2;

			System.out.println("Job 2 time: " + elapsedTime2 / 1000 + " s\n");

			System.out.println("Total time: "
					+ (elapsedTime1 / 1000 + elapsedTime2 / 1000) + " s\n");

			System.out.println("Cache path: " + Initial.PATH_NODE_CACHE);
			System.out.println("In-node path: " + Initial.PATH_INPUT_NODE_CACHE);
			System.out.println("Out-node path: " + Initial.PATH_OUTPUT_NODE_CACHE);

			System.exit(0);

		} catch (Exception e) {
			// Report the failure and make it visible to callers via the exit status;
			// previously the process exited 0 even after an exception.
			e.printStackTrace();
			System.exit(1);
		}

	}

}
