/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package maxent.mr.io;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Utility class that merges the per-reducer output files of a Hadoop job
 * into a single file in HDFS, optionally sorted by an integer key.
 *
 * @author hadoop
 */

public class HDFSFileUtil {


    /**
     * Merges all Hadoop output files in a directory into a single HDFS file,
     * sorted by the integer key in the first tab-separated field of each line.
     * Malformed lines (missing tab or non-numeric key) are skipped, and lines
     * sharing the same key overwrite one another (last one wins) because the
     * entries are collected into a {@code TreeMap}.
     *
     * @param inputDir  directory containing the Hadoop output files
     * @param outputDir path of the merged, sorted output file
     * @throws IOException if listing, reading, or writing HDFS fails
     */
    public static void MergeAndSort(String inputDir, String outputDir) throws IOException{

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] inputFiles = fs.listStatus(new Path(inputDir));
        // TreeMap keeps entries ordered by key, which yields the sorted output.
        Map<Integer, String> map = new TreeMap<Integer, String>();

        for (FileStatus status : inputFiles) {

            // Skip the Hadoop-generated "_logs" directory.
            if (status.getPath().getName().equalsIgnoreCase("_logs")) {
                continue;
            }

            // Read lines via a BufferedReader with an explicit charset instead of
            // the deprecated DataInputStream.readLine(); try-with-resources
            // guarantees the stream is closed (it previously leaked).
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(fs.open(status.getPath()), StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    try {
                        map.put(getField(line), getValue(line));
                    } catch (RuntimeException e) {
                        // Best-effort: skip malformed lines, but report them instead
                        // of silently discarding the exception as before.
                        System.err.println("Skipping malformed line: " + line
                                + " (" + e.getMessage() + ")");
                    }
                }
            }
        }

        // try-with-resources closes (and therefore flushes) the output stream;
        // previously it was never closed.
        try (FSDataOutputStream out = fs.create(new Path(outputDir))) {
            for (String val : map.values()) {
                out.writeBytes(val);
                out.write('\n');
            }
        }
    }

    /**
     * Extracts the sort key: the integer in the first tab-separated field.
     *
     * @throws NumberFormatException if the first field is not a valid integer
     */
    private static int getField(String line) {
        return Integer.parseInt(line.split("\t")[0]);
    }


    /**
     * Extracts the value: the second tab-separated field.
     *
     * @throws ArrayIndexOutOfBoundsException if the line has no tab separator
     */
    private static String getValue(String line){
        return line.split("\t")[1];
    }


    /**
     * Merges all Hadoop output files in a directory into a single HDFS file,
     * copying raw bytes in the order returned by the directory listing.
     * I/O failures are reported to stderr (best-effort, as before).
     *
     * @param inputDir  directory containing the Hadoop output files
     * @param outputDir path of the merged output file
     */
    public static void Merge(String inputDir, String outputDir){

        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            FileStatus[] inputFiles = fs.listStatus(new Path(inputDir));

            // BUG FIX: the output stream used to be closed INSIDE the loop, so the
            // merge broke as soon as a second input file was written. It is now
            // closed exactly once, after all inputs, via try-with-resources.
            try (FSDataOutputStream out = fs.create(new Path(outputDir))) {

                for (FileStatus status : inputFiles) {
                    // don't process the log files
                    if (status.getPath().getName().equalsIgnoreCase("_logs")) {
                        continue;
                    }

                    // try-with-resources ensures each input stream is closed even
                    // if a read or write throws.
                    try (FSDataInputStream in = fs.open(status.getPath())) {
                        byte[] buffer = new byte[4096];
                        int bytesRead;
                        while ((bytesRead = in.read(buffer)) > 0) {
                            // write into hdfs
                            out.write(buffer, 0, bytesRead);
                        }
                    }
                }
            }
        } catch (IOException iOException) {
            iOException.printStackTrace();
        }
    }

}
