import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapred.lib.InputSampler;
import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.mapred.lib.InverseMapper;

public class Sort extends Configured implements Tool {

  /** Handle to the most recently submitted job; null until run() has submitted one. */
  private RunningJob jobResult = null;

  /**
   * Configures and submits a (nominally) total-order sort over a SequenceFile input,
   * inverting each (key, value) pair via {@link InverseMapper} and writing the result
   * as text.
   *
   * @param args args[0] = input path (SequenceFile), args[1] = output path
   * @return 0 on successful job completion, 2 on bad usage
   * @throws Exception if sampling, partition-file writing, or job submission fails
   */
  public int run(String[] args) throws Exception {
    // Fail fast with a usage message instead of an
    // ArrayIndexOutOfBoundsException when the paths are missing.
    if (args.length < 2) {
      System.err.println("Usage: Sort <input-path> <output-path>");
      ToolRunner.printGenericCommandUsage(System.err);
      return 2;
    }

    JobConf jobConf = new JobConf(getConf(), Sort.class);
    jobConf.setJobName("sorter");

    // InverseMapper emits (value, key), so the map OUTPUT key type is the
    // input VALUE type. NOTE(review): InputSampler below samples input KEYS;
    // confirm the partition file's key type actually matches the map output
    // key type, otherwise TotalOrderPartitioner will fail at runtime.
    jobConf.setMapperClass(InverseMapper.class);
    jobConf.setReducerClass(IdentityReducer.class);

    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(TextOutputFormat.class);
    jobConf.setOutputKeyClass(IntWritable.class);
    jobConf.setOutputValueClass(Text.class);

    FileInputFormat.setInputPaths(jobConf, new Path(args[0]));
    FileOutputFormat.setOutputPath(jobConf, new Path(args[1]));

    // NOTE(review): with a single reduce task every record lands in one
    // partition, so the total-order sampling below is wasted work; raise
    // this count to have TotalOrderPartitioner actually spread the keys.
    jobConf.setNumReduceTasks(1);
    jobConf.setPartitionerClass(TotalOrderPartitioner.class);

    // Sample ~10% of records, capped at 500 samples drawn from at most 10 splits.
    InputSampler.Sampler<IntWritable, Text> sampler =
        new InputSampler.RandomSampler<IntWritable, Text>(0.1, 500, 10);

    System.out.println("Sampling input to effect total-order sort...");

    // Write the partition boundaries alongside the (fully qualified) input directory.
    Path inputDir = FileInputFormat.getInputPaths(jobConf)[0];
    inputDir = inputDir.makeQualified(inputDir.getFileSystem(jobConf));
    Path partitionFile = new Path(inputDir, "_sortPartitioning");
    TotalOrderPartitioner.setPartitionFile(jobConf, partitionFile);
    InputSampler.writePartitionFile(jobConf, sampler);

    // Ship the partition file to every task through the distributed cache,
    // symlinked into each task's working directory under the URI fragment name.
    URI partitionUri = new URI(partitionFile.toString() + "#" + "_sortPartitioning");
    DistributedCache.addCacheFile(partitionUri, jobConf);
    DistributedCache.createSymlink(jobConf);

    jobResult = JobClient.runJob(jobConf);
    return 0;
  }

  /**
   * Entry point: runs the sort via ToolRunner so the generic Hadoop options
   * (-conf, -D, -fs, -jt, ...) are parsed before {@link #run} is invoked.
   */
  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new Sort(), args);
    System.exit(res);
  }

  /**
   * Get the last job that was run using this instance.
   * @return the results of the last job that was run
   */
  public RunningJob getResult() {
    return jobResult;
  }
}
