/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package homomorphism.list;
import homomorphism.GroupLongPairComparator;
import homomorphism.HomomorphismBase;
import homomorphism.ListPartitioner;
import homomorphism.SecondPartitioner;
import homomorphism.TotalLongPairKeyComparator;
import utility.LongPair;
import java.util.ArrayList;
import java.lang.reflect.*;
import java.util.Date;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Generic wrapper that executes a list homomorphism as a two-stage Hadoop
 * (old {@code org.apache.hadoop.mapred} API) job pipeline: the first job runs
 * the user-supplied folding phase over the partitioned input list, the second
 * job (single reducer) aggregates the partial results into the final value.
 *
 * <p>Subclasses provide the homomorphism via {@link #filter}, {@link #plus}
 * and {@link #aggregator}. NOTE(review): the static reflection handles
 * ({@code FoldingMethod}, {@code UnFoldMethod}, {@code userClass}) are
 * assigned outside this file — confirm their writers before relying on them.
 *
 * @param <InVal> element type consumed by the folding phase
 * @param <OtVal> result type produced by the folding phase
 *
 * @author takeshi
 */
public class BasicListHomomorphismWrapper<InVal,OtVal>  extends HomomorphismBase{

    private static final String MyUsage =
            "usage: [input file path] [output file path] [user conf file] <-M num> <-R num> -h : [usage]";
    private static final Log LOG = LogFactory.getLog(BasicListHomomorphismWrapper.class);

    // Reflection handles for the user-supplied (un)folding methods.
    // NOTE(review): populated elsewhere — not written anywhere in this file.
    public static Method FoldingMethod, UnFoldMethod;
    public static Class userClass;

    /**
     * User hook: maps one input element into the output domain.
     * This stub is replaced by generated/compiled user code at run time.
     */
    public OtVal filter(InVal value) throws Exception {
        throw new RuntimeException("Compiled Code");
    }

    /**
     * User hook: associative combination of two partial results.
     * The default stub returns {@code null}.
     */
    public OtVal plus(OtVal v1, OtVal v2) throws Exception {
        return null;
    }

    /**
     * User hook: aggregates a list of partial results.
     * This stub is replaced by generated/compiled user code at run time.
     */
    public ArrayList<OtVal> aggregator(ArrayList<OtVal> values) throws Exception {
        throw new RuntimeException("Compiled Code");
    }

    // Concrete, statically-named subclasses of the inherited mapper/reducer
    // templates so the MapReduce framework can instantiate them reflectively.
    public static class MyFirstMapper<InVal, OtmVal> extends FirstMapper<InVal, OtmVal> {
    }

    public static class MyFirstReducer<OtVal> extends FirstReducer<OtVal> {
    }

    public static class MySecondMapper<OtVal> extends SecondMapper<OtVal> {
    }

    public static class MySecondReducer<OtVal> extends SecondReducer<OtVal> {
    }

    /**
     * Configures and runs the two jobs back to back.
     *
     * @param args [0]=input path, [1]=output path, [2]=user conf file,
     *             optionally followed by {@code -R <numReducers>}
     * @return 0 on success, -1 on bad arguments or failed configuration
     * @throws Exception if either job fails ({@code JobClient.runJob} throws)
     */
    public int run(String[] args) throws Exception {
        if (args.length < 3) {
            printUsage(this, MyUsage);
            return -1;
        }
        Configuration conf = getConf();
        JobConf job1 = new JobConf(conf, this.getClass());
        job1.setJarByClass(this.getClass());
        // Consistency fix: use MyFirstMapper (the concrete subclass declared
        // above, behaviorally identical to FirstMapper), matching how
        // MySecondReducer is used for job2 below.
        job1 = initJobConf1(job1, MyFirstMapper.class, MyFirstReducer.class, args);
        // Bug fix: the null guard must precede any use of job1; the original
        // dereferenced job1 (setNumTasksToExecutePerJvm) before testing it.
        if (job1 == null) {
            LOG.error(" job1 init  failed ");
            return -1;
        }
        // Reuse the JVM across tasks of this job.
        job1.setNumTasksToExecutePerJvm(-1);

        JobConf job2 = new JobConf(getConf(), this.getClass());
        job2.setJarByClass(this.getClass());
        job2 = initJobConf2(job2, IdentityMapper.class, MySecondReducer.class, args);

        Date startTime = new Date();
        System.out.println("The 1st Job started: " + startTime);
        // JobClient.runJob() blocks until the job completes and throws on
        // failure, so the extra waitForCompletion() calls were redundant.
        JobClient.runJob(job1);
        JobClient.runJob(job2);
        Date endTime = new Date();
        System.out.println("All Jobs ended: " + endTime);
        System.out.println("The 2 jobs took "
                + (endTime.getTime() - startTime.getTime()) / 1000 + " seconds.");
        return 0;
    }

    // Bean-style accessors retained for external configuration/callers.
    public String inputPaths;

    public void setInputPaths(String inputPaths) {
        this.inputPaths = inputPaths;
    }

    public String getInputPaths() {
        return inputPaths;
    }
    public String outputPath;

    public void setOutputPath(String outputPath) {
        this.outputPath = outputPath;
    }

    public String getOutputPath() {
        return outputPath;
    }
    public RunningJob runningJob;

    public RunningJob getRunningJob() {
        return runningJob;
    }

    /** Prints the tool-specific usage line plus Hadoop's generic options. */
    public static void printUsage(Tool tool, String extraArgsUsage) {
        System.err.printf("Usage: %s [genericOptions] %s\n\n",
                tool.getClass().getSimpleName(), extraArgsUsage);
        GenericOptionsParser.printGenericCommandUsage(System.err);
    }

    /**
     * Configures the first (folding) job: SequenceFile in/out, user mapper and
     * reducer, key/value classes taken from the job configuration properties.
     *
     * @param args [0]=input path, [1]=intermediate output path; may contain
     *             {@code -R <num>} to set the reducer count (default 1)
     * @throws ClassNotFoundException if a configured key/value class is missing
     */
    public JobConf initJobConf1(JobConf job,
            Class UserFirstMapperClass,
            Class UserFirstReducerClass,
            String[] args) throws ClassNotFoundException {

        SequenceFileInputFormat.addInputPath(job, new Path(args[0]));
        SequenceFileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setJobName("ListHomoMorphismStep1");
        job.setMapperClass(UserFirstMapperClass);
        job.setReducerClass(UserFirstReducerClass);

        ClassLoader myloader = BasicListHomomorphismWrapper.class.getClassLoader();
        String KeyClassName = job.get(IndexKeyClassProperty);
        Class KeyClass = null;
        if (KeyClassName != null && KeyClassName.length() > 0) {
            KeyClass = myloader.loadClass(KeyClassName);
            job.setOutputKeyClass(KeyClass);
            job.setMapOutputKeyClass(KeyClass);
        } else { // default key type when none is configured
            job.setOutputKeyClass(LongPair.class);
            job.setMapOutputKeyClass(LongPair.class);
        }
        String OutputClassName = job.get(FoldingOutValClassProperty);
        job.setOutputValueClass(myloader.loadClass(OutputClassName));

        String MapOutputClassName = job.get(FoldingInValClassProperty);
        job.setMapOutputValueClass(myloader.loadClass(MapOutputClassName));
        job.setOutputKeyComparatorClass(TotalLongPairKeyComparator.class);
        job.setOutputValueGroupingComparator(GroupLongPairComparator.class);
        job.setInputFormat(SequenceFileInputFormat.class);
        job.setOutputFormat(SequenceFileOutputFormat.class);
        job.setPartitionerClass(ListPartitioner.class);

        int redNum = 1;
        int idx = HasParameter("-R", args);
        // Bug fix: the original guard (args.length > idx) still allowed
        // args[idx + 1] to go out of bounds when -R was the last argument.
        if (idx >= 0 && idx + 1 < args.length) {
            redNum = Integer.parseInt(args[idx + 1]);
        }
        job.setNumReduceTasks(redNum);
        return job;
    }

    /**
     * Configures the second (aggregation) job: reads the first job's output,
     * funnels everything through a single reducer, and writes text output to
     * {@code args[1] + "_final"}.
     */
    public JobConf initJobConf2(JobConf job,
            Class User2ndMapperClass,
            Class User2ndReducerClass,
            String[] args)
            throws Exception {

        SequenceFileInputFormat.addInputPath(job, new Path(args[1]));
        TextOutputFormat.setOutputPath(job, new Path(args[1] + "_final"));
        job.setJobName("ListHomoMorphismStep2");
        job.setInputFormat(SequenceFileInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);
        job.setMapperClass(User2ndMapperClass);

        // Consistency fix: load through this class's loader, matching
        // initJobConf1 (the original copy-pasted ThirdHomomorphismWrapper).
        ClassLoader myloader = BasicListHomomorphismWrapper.class.getClassLoader();
        String KeyClassName = job.get(IndexKeyClassProperty);
        Class KeyClass = null;
        if (KeyClassName != null && KeyClassName.length() > 0) {
            KeyClass = myloader.loadClass(KeyClassName);
            job.setMapOutputKeyClass(KeyClass);
        } else { // default key type when none is configured
            job.setMapOutputKeyClass(LongPair.class);
        }
        String OutputClassName = job.get(FoldingOutValClassProperty);
        job.setOutputValueClass(myloader.loadClass(OutputClassName));

        String MapOutputClassName = job.get(FoldingInValClassProperty);
        job.setMapOutputValueClass(myloader.loadClass(MapOutputClassName));
        job.setReducerClass(User2ndReducerClass);

        // A single reducer produces the final aggregated value.
        job.setNumReduceTasks(1);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputKeyComparatorClass(TotalLongPairKeyComparator.class);
        job.setOutputValueGroupingComparator(GroupLongPairComparator.class);
        job.setPartitionerClass(SecondPartitioner.class);

        return job;
    }

    /**
     * The main method.
     *
     * usage: [input path] [output path] [user conf file] ...
     */
    public static void main(String[] args) throws Exception {

        if (args.length < 3) {
            for (int i = 0; i < args.length; i++) {
                System.err.println(args[i]);
            }
            System.err.println("Usage:hadoop jar  <input path> <output path> <userClass>, <folding_type class>...");
            System.exit(-1);
        }

        Configuration conf = new Configuration();
        // Merge the user's configuration file (third argument) into conf.
        Path userConfFile = new Path(args[2]);
        conf.addResource(userConfFile);
        JobConf job = new JobConf(conf);
        int res = ToolRunner.run(job, new BasicListHomomorphismWrapper(), args);
        System.exit(res);
    }

    /**
     * Linear scan for a command-line flag.
     *
     * @param query the flag to look for (e.g. {@code "-R"})
     * @param pars  the argument array
     * @return the index of the first element equal to {@code query}, or -1
     */
    public static int HasParameter(String query, String[] pars) {
        for (int i = 0; i < pars.length; i++) {
            if (query.equals(pars[i])) {
                return i;
            }
        }
        return -1;
    }
}
