package com.learning.hadoop.putmerge;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;


/**
 * @author Sam Sho
 */
public class PutMerge {

    /**
     * Merges all regular files in a local directory and uploads them as a single
     * file on HDFS.
     * Reference: "Hadoop in Action".
     *
     * @param localDir path of the local source directory
     * @param fsFile   destination file path on HDFS (overwritten if it exists)
     * @throws IOException if listing the local directory, creating the HDFS file,
     *                     or copying any stream fails
     */
    public static void putMergeFunc(String localDir, String fsFile) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://newlistest02:9000");
        // Identity used for HDFS permission checks; must be set before FileSystem.get().
        System.setProperty("HADOOP_USER_NAME", "sinosoft");

        // fs: the HDFS file system; local: the local file system.
        FileSystem fs = FileSystem.get(conf);
        FileSystem local = FileSystem.getLocal(conf);

        Path srcDir = new Path(localDir);
        Path dstFile = new Path(fsFile);

        // List the input directory; may contain subdirectories, which we skip below.
        FileStatus[] statuses = local.listStatus(srcDir);

        // try-with-resources guarantees the output stream is closed even when
        // a copy fails partway through (the original leaked it on exception).
        try (FSDataOutputStream out = fs.create(dstFile)) {
            for (FileStatus st : statuses) {
                // Only merge regular files; local.open() on a directory would throw.
                if (!st.isFile()) {
                    continue;
                }
                try (FSDataInputStream in = local.open(st.getPath())) {
                    // Append this file's bytes to the merged output; keep 'out' open
                    // across iterations (hence close=false semantics via our own try).
                    IOUtils.copyBytes(in, out, 4096, false);
                }
            }
        }
    }

    public static void main(String[] args) throws IOException {
        String l = "/data/learning-hadoop";
        String f = "hdfs://newlistest02:9000/putMerge.txt";
        putMergeFunc(l, f);
    }
}