/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package Examples;

import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.lib.HashPartitioner;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 *
 * @author takeshi
 */
/**
 * Hadoop (old {@code org.apache.hadoop.mapred} API) job that sums all
 * {@link LongWritable} values read from a SequenceFile input and writes a
 * single {@code (NullWritable, LongWritable)} total per reducer to a
 * SequenceFile output.
 *
 * <p>Usage: {@code SUM_MR <inputPath> <outputPath> [-R <numReducers>]}
 *
 * @author takeshi
 */
public class SUM_MR extends Configured implements Tool {

    /**
     * Sums every {@link LongWritable} value for a key and emits the total
     * under a {@link NullWritable} key (the input key is discarded).
     */
    public static class MyReducer extends MapReduceBase implements Reducer<LongWritable, LongWritable, NullWritable, LongWritable> {

        public void reduce(LongWritable key, Iterator<LongWritable> values, OutputCollector<NullWritable, LongWritable> output, Reporter reporter) throws IOException {
            // Accumulate in a primitive long instead of allocating a new
            // LongWritable per value (the original boxed on every iteration).
            long sum = 0;
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(NullWritable.get(), new LongWritable(sum));
        }
    }

    /**
     * Configures and runs the summing job.
     *
     * @param args {@code args[0]} = input path, {@code args[1]} = output path,
     *             optional {@code -R n} sets the number of reduce tasks
     *             (defaults to 1 — required for a single global sum).
     * @return 0 on successful completion
     * @throws Exception if the job fails ({@code JobClient.runJob} throws on
     *         failure) or the paths/arguments are invalid
     */
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        JobConf job = new JobConf(conf, this.getClass());
        job.setJarByClass(this.getClass());
        SequenceFileInputFormat.addInputPath(job, new Path(args[0]));
        SequenceFileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setJobName("ListHomoMorphismStep1");
        // Identity map: pass (LongWritable, LongWritable) records straight to the reducer.
        job.setMapperClass(IdentityMapper.class);
        job.setReducerClass(MyReducer.class);

        job.setInputFormat(SequenceFileInputFormat.class);
        job.setOutputFormat(SequenceFileOutputFormat.class);

        job.setMapOutputKeyClass(LongWritable.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(LongWritable.class);

        job.setPartitionerClass(HashPartitioner.class);

        int redNum = 1;
        int idx = HasParameter("-R", args);
        // Guard idx + 1, not idx: the original check (args.length > idx) let
        // "-R" as the final argument trigger ArrayIndexOutOfBoundsException.
        if (idx >= 0 && idx + 1 < args.length) {
            redNum = Integer.parseInt(args[idx + 1]);
        }
        job.setNumReduceTasks(redNum);

        Date startTime = new Date();
        System.out.println("The 1st Job started: " + startTime);

        // JobClient.runJob blocks until the job completes and throws on
        // failure; the former chained waitForCompletion() was redundant.
        JobClient.runJob(job);
        Date end_time = new Date();
        System.out.println("All Jobs ended: " + end_time);
        System.out.println("The sum  job took "
                + (end_time.getTime() - startTime.getTime()) / 1000 + " seconds.");

        // NOTE: the original checked (job == null) here, after job had
        // already been constructed and used — an unreachable branch, removed.
        return 0;
    }

    /**
     * Returns the index of the first occurrence of {@code query} in
     * {@code pars}, or -1 if absent.
     *
     * <p>(Name kept as {@code HasParameter} for source compatibility with
     * existing callers, despite Java's lowerCamelCase convention.)
     *
     * @param query the flag to search for (e.g. {@code "-R"})
     * @param pars  the argument array to scan
     * @return index of {@code query}, or -1 when not found
     */
    public static int HasParameter(String query, String[] pars) {
        for (int i = 0; i < pars.length; i++) {
            if (pars[i].equals(query)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Entry point: delegates to {@link ToolRunner} so generic Hadoop options
     * ({@code -D}, {@code -conf}, ...) are parsed before {@link #run}.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        JobConf job = new JobConf(conf);
        int res = ToolRunner.run(job, new SUM_MR(), args);
        System.exit(res);
    }
}
