package trunk;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import trunk.core.*;
import trunk.firstSec.daFLongLineOutputFormat;
import trunk.firstSec.daMapperNgoNgo;
import trunk.firstSec.daReducer;
import trunk.firstSec.outputStringWrapper;

import java.net.URI;

/**
 * Hadoop (old {@code mapred} API) job driver.
 *
 * <p>Usage: {@code bruteNgoNgo <cacheFileUri> <inputPath> <outputPath>}
 * <ul>
 *   <li>{@code args[0]} — URI of a side file placed in the DistributedCache</li>
 *   <li>{@code args[1]} — job input path</li>
 *   <li>{@code args[2]} — job output path</li>
 * </ul>
 */
public class bruteNgoNgo {
    public static void main(String[] args) throws Exception {

        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException on a missing argument.
        if (args.length < 3) {
            System.err.println("Usage: bruteNgoNgo <cacheFileUri> <inputPath> <outputPath>");
            System.exit(2);
        }

        // BUGFIX: was JobConf(bruteHadoop.class) — pass this driver class so
        // Hadoop locates the jar containing THIS job's mapper/reducer.
        JobConf conf = new JobConf(bruteNgoNgo.class);
        conf.setJobName("doopadoopadoopadoopadoop");

        // Give hint for the key-value format of mapper class
        conf.setMapOutputKeyClass(Text.class);
        conf.setMapOutputValueClass(IntWritable.class);

        // Give hint for the key-value output format of the reduce class.
        // BUGFIX: setOutputValueClass was called twice (Text, then
        // outputStringWrapper), leaving the output KEY class unset — the
        // first call was clearly meant to be setOutputKeyClass.
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(outputStringWrapper.class);

        // Set map class and reduce class
        conf.setMapperClass(daMapperNgoNgo.class);
        conf.setReducerClass(daReducer.class);

        // Set input and output format
        conf.setInputFormat(daFLongLineInputFormat.class);
        conf.setOutputFormat(daFLongLineOutputFormat.class);

        // Set class for secondary sorting
        // NOTE(review): grouping comparator is intentionally disabled; re-enable
        // if value-grouping by key prefix is needed for secondary sort.
        //conf.setOutputValueGroupingComparator(trunk.core.Comparator.keyComparator.class);

        FileInputFormat.setInputPaths(conf, new Path(args[1]));
        FileOutputFormat.setOutputPath(conf, new Path(args[2]));

        // Ship the side file to every task via the DistributedCache.
        DistributedCache.addCacheFile(new URI(args[0]), conf);

        JobClient.runJob(conf);
    }
}
