package com.snda.lidejia.hivetotalsort;

import com.snda.lidejia.hivetotalsort.mr.HiveSortFileInputFormat;
import com.snda.lidejia.hivetotalsort.mr.HiveSortReducer;
import com.snda.lidejia.hivetotalsort.type.HiveSortKey;
import java.io.IOException;
import java.net.URI;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.InputSampler;
import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Driver for a totally-ordered sort of Hive table data. It samples the input
 * with {@code InputSampler} to build a {@code TotalOrderPartitioner} partition
 * file, then submits a MapReduce job whose combined reducer outputs are
 * globally sorted.
 */
public class Sort extends Configured implements Tool {

    /**
     * Entry point.
     *
     * <p>NOTE: the real tool arguments arrive space-joined inside {@code args[0]}
     * (presumably the launcher passes them as one quoted string — see
     * {@link #run(String[])} for the positional contract). Total wall-clock
     * seconds are printed to stdout before exit.
     *
     * @param args {@code args[0]} is a single whitespace-separated argument string
     * @throws Exception if the job setup or execution fails
     */
    public static void main(String[] args) throws Exception {
        long startTime = System.currentTimeMillis();
        int code = ToolRunner.run(new Sort(), args[0].split(" "));
        System.out.println((System.currentTimeMillis() - startTime) / 1000.0);
        System.exit(code);
    }

    /**
     * Recursively registers every regular file under {@code root} as a job input
     * path; directories are descended into, only leaf files are added.
     *
     * @param root   directory (or file) to scan
     * @param config job configuration the input paths are added to
     * @param fs     filesystem used for the directory listing
     * @throws IOException if a directory listing fails. Propagated (rather than
     *         logged and swallowed as before) so that the job cannot silently
     *         run against a partial input set.
     */
    private void addAllFilesInDir(Path root, JobConf config, FileSystem fs) throws IOException {
        for (FileStatus file : fs.listStatus(root)) {
            if (file.isDir()) {
                addAllFilesInDir(file.getPath(), config, fs);
            } else {
                FileInputFormat.addInputPath(config, file.getPath());
            }
        }
    }

    /**
     * Configures and runs the total-order sort job.
     *
     * <p>Positional arguments:
     * <ol start="0">
     *   <li>sort metadata, stored under the {@code hive.totalsort.meta} key</li>
     *   <li>input directory (scanned recursively for data files)</li>
     *   <li>output directory</li>
     *   <li>sampler {@code numSamples}</li>
     *   <li>sampler {@code maxSplitsSampled}</li>
     *   <li>number of reduce tasks</li>
     * </ol>
     *
     * @return 0 on successful job completion
     * @throws Exception if sampling, configuration, or the job itself fails
     */
    public int run(String[] args) throws Exception {
        JobConf config = new JobConf(getConf());
        config.set("hive.totalsort.meta", args[0]);

        config.setJarByClass(Sort.class);
        config.setJobName("Hive Total Sort");

        // BUG FIX: the FileSystem returned by FileSystem.get() is a JVM-wide
        // cached instance. The old code closed it in a finally block, which made
        // every later HDFS access in this method — InputSampler.writePartitionFile
        // and the job submission itself — fail with "Filesystem closed". Do not
        // close a cached FileSystem; Hadoop disposes of it at JVM shutdown.
        FileSystem fs = FileSystem.get(config);
        addAllFilesInDir(new Path(args[1]), config, fs);
        FileOutputFormat.setOutputPath(config, new Path(args[2]));

        config.setInputFormat(HiveSortFileInputFormat.class);
        config.setOutputFormat(TextOutputFormat.class);

        config.setReducerClass(HiveSortReducer.class);

        // Map output carries the sortable composite key; final output drops the
        // key and emits only the record text.
        config.setMapOutputKeyClass(HiveSortKey.class);
        config.setMapOutputValueClass(Text.class);
        config.setOutputKeyClass(NullWritable.class);
        config.setOutputValueClass(Text.class);

        config.setPartitionerClass(TotalOrderPartitioner.class);

        config.setNumReduceTasks(Integer.parseInt(args[5]));

        // Sample the input to compute reducer split points, then distribute the
        // partition file to every task via the distributed cache. The "#_partitions"
        // fragment plus createSymlink() exposes it under the name
        // TotalOrderPartitioner expects in each task's working directory.
        // NOTE(review): the partition file path is a fixed location under /tmp,
        // so two concurrent runs of this job would clobber each other's file —
        // consider a per-job unique path.
        long startTime = System.currentTimeMillis();
        InputSampler.Sampler<HiveSortKey, Text> sampler =
                new InputSampler.RandomSampler<HiveSortKey, Text>(
                        0.1, Integer.parseInt(args[3]), Integer.parseInt(args[4]));
        Path partitionFile = new Path("/tmp/", "_partitions");
        TotalOrderPartitioner.setPartitionFile(config, partitionFile);
        InputSampler.writePartitionFile(config, sampler);
        URI partitionUri = new URI(partitionFile.toString() + "#_partitions");
        DistributedCache.addCacheFile(partitionUri, config);
        DistributedCache.createSymlink(config);
        System.out.println(String.format("sampler costed %s",
                (System.currentTimeMillis() - startTime) / 1000.0));

        JobClient.runJob(config);
        return 0;
    }
}
