package com.snda.dw.lidejia.hadooptotalsort;

import com.snda.dw.lidejia.hadooptotalsort.meta.SortKeyMetaIO;
import com.snda.dw.lidejia.hadooptotalsort.mr.SortKeysFileInputFormat;
import com.snda.dw.lidejia.hadooptotalsort.mr.SortKeysReducer;
import com.snda.dw.lidejia.hadooptotalsort.type.SortKeys;
import java.io.IOException;
import java.net.URI;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.InputSampler;
import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Hello world!
 *
 */
/**
 * Driver for a Hadoop total-order sort job.
 *
 * <p>Distributes a sort-key metadata file and a {@link TotalOrderPartitioner}
 * partition file through the {@link DistributedCache}, samples the input with
 * {@link InputSampler.RandomSampler} to build the partition boundaries, and
 * then runs the MapReduce job so that reducer output is globally ordered.
 */
public class Sort extends Configured implements Tool {

    /**
     * Entry point. The six real job arguments are packed into {@code args[0]}
     * as a single space-separated string (see {@link #run(String[])} for their
     * meaning); this method splits them and reports total wall-clock time.
     *
     * @param args {@code args[0]} = "meta inputPath outputPath sampleTotal splitsTotal reduceTaskNum"
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            System.err.println(
                    "Usage: Sort \"<meta> <inputPath> <outputPath> <sampleTotal> <splitsTotal> <reduceTaskNum>\"");
            System.exit(2);
        }
        long startTime = System.currentTimeMillis();
        int code = ToolRunner.run(new Sort(), args[0].split(" "));
        System.out.println(String.format("Time costed: %ss", (System.currentTimeMillis() - startTime) / 1000.0));
        System.exit(code);
    }

    /**
     * Recursively registers every regular file under {@code root} as a job
     * input path.
     *
     * @param root directory (or file) to scan
     * @param conf job configuration that receives the input paths
     * @param fs   filesystem used to list {@code root}
     * @throws IOException if listing fails; propagated so the job never
     *         silently runs with a partial input set (previously this was
     *         logged and swallowed)
     */
    private void addAllFilesInDir(Path root, JobConf conf, FileSystem fs) throws IOException {
        for (FileStatus file : fs.listStatus(root)) {
            if (file.isDir()) {
                addAllFilesInDir(file.getPath(), conf, fs);
            } else {
                FileInputFormat.addInputPath(conf, file.getPath());
            }
        }
    }

    /**
     * Configures and runs the total-sort job.
     *
     * @param args [0] sort-key metadata string, [1] input root dir,
     *             [2] output dir, [3] number of samples to collect,
     *             [4] max splits to sample, [5] number of reduce tasks
     * @return 0 on successful job completion
     * @throws Exception on any configuration, sampling, or job failure
     */
    @Override
    public int run(String[] args) throws Exception {
        // args
        String meta = args[0];
        String inputPath = args[1];
        String outputPath = args[2];
        int sampleTotal = Integer.parseInt(args[3]);
        int splitsTotal = Integer.parseInt(args[4]);
        int reduceTaskNum = Integer.parseInt(args[5]);
        // job config
        JobConf config = new JobConf(getConf());
        // Distribute the key metadata for tasks to read via the "_metas" symlink.
        // NOTE(review): the fixed /tmp path means concurrent runs clobber each
        // other's metadata — consider a per-job temp dir.
        Path metaPath = new Path("/tmp/_metas");
        SortKeyMetaIO.writeTo(metaPath, meta);
        URI metaURI = new URI(metaPath.toString() + "#_metas");
        DistributedCache.addCacheFile(metaURI, config);
        DistributedCache.createSymlink(config);
        //
        config.setJarByClass(Sort.class);
        config.setJobName("Hadoop Total Sort");
        // inputs
        // FileSystem.get() returns a JVM-cached, shared instance. Do NOT close
        // it: InputSampler.writePartitionFile and JobClient.runJob below still
        // need it, and closing the cached instance makes every later use fail
        // with "Filesystem closed". (The previous fs.close() was a bug.)
        FileSystem fs = FileSystem.get(config);
        addAllFilesInDir(new Path(inputPath), config, fs);
        // output
        FileOutputFormat.setOutputPath(config, new Path(outputPath));
        //
        config.setInputFormat(SortKeysFileInputFormat.class);
        config.setOutputFormat(TextOutputFormat.class);
        //
        config.setReducerClass(SortKeysReducer.class);
        //
        config.setMapOutputKeyClass(SortKeys.class);
        config.setMapOutputValueClass(Text.class);
        config.setOutputKeyClass(NullWritable.class);
        config.setOutputValueClass(Text.class);
        //
        config.setNumReduceTasks(reduceTaskNum);
        // Partitioning: sample the input (freq 0.1, capped at sampleTotal keys
        // across at most splitsTotal splits) to compute reduceTaskNum-1 split
        // points, then ship the partition file to tasks as "_partitions".
        config.setPartitionerClass(TotalOrderPartitioner.class);
        long startTime = System.currentTimeMillis();
        InputSampler.Sampler<SortKeys, Text> sampler = new InputSampler.RandomSampler<SortKeys, Text>(0.1, sampleTotal, splitsTotal);
        Path partitionFile = new Path("/tmp/", "_partitions");
        TotalOrderPartitioner.setPartitionFile(config, partitionFile);
        InputSampler.writePartitionFile(config, sampler);
        URI partitionUri = new URI(partitionFile.toString() + "#_partitions");
        DistributedCache.addCacheFile(partitionUri, config);
        DistributedCache.createSymlink(config);
        System.out.println(String.format("sampler costed %s", (System.currentTimeMillis() - startTime) / 1000.0));
        //
        JobClient.runJob(config);
        return 0;
    }
}
