package cc.lucien.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;

/**
 * Uses the HDFS Java API to merge all qualifying files in a directory
 * (excluding those with a ".abc" extension) into a single output file.
 */
public class MyMergeFile {
    private final Path inputPath;    // directory containing the files to merge
    private final Path outputPath;   // path of the merged output file

    /**
     * @param input  HDFS URI of the directory whose files should be merged
     * @param output HDFS URI of the file the merged content is written to
     */
    public MyMergeFile(String input, String output) {
        this.inputPath = new Path(input);
        this.outputPath = new Path(output);
    }

    /**
     * Merges every file in {@code inputPath} whose name does not match the
     * ".abc" filter into the single file {@code outputPath}, echoing each
     * source file's path, size, permission and content to stdout.
     *
     * @throws IOException if any HDFS read, write, or listing operation fails
     */
    public void doMerge() throws IOException {
        // Build the client configuration for the target cluster.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://10.24.130.132:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // FileSystem.get returns cached instances; do not close them here,
        // as that would invalidate the shared cache entry.
        FileSystem fsSource = FileSystem.get(URI.create(inputPath.toString()), conf);
        FileSystem fsDst = FileSystem.get(URI.create(outputPath.toString()), conf);

        // Filter out files in the input directory with a ".abc" extension.
        FileStatus[] sourceStatus = fsSource.listStatus(inputPath, new MyPathFilter(".*\\.abc"));

        // try-with-resources guarantees the output stream is closed even if
        // an exception is thrown mid-merge (the original leaked it on error).
        try (FSDataOutputStream fsdos = fsDst.create(outputPath)) {
            for (FileStatus sta : sourceStatus) {
                // Skip the output file itself: outputPath may live inside
                // inputPath (as it does in main), and a re-run would otherwise
                // merge the previous result into the new one.
                if (sta.getPath().equals(fsSource.makeQualified(outputPath))) {
                    continue;
                }
                // Echo path, size and permission of each merged file.
                System.out.print("路径：" + sta.getPath() + " ⽂件⼤⼩：" + sta.getLen() +
                        " 权限：" + sta.getPermission() + " 内容：");
                try (FSDataInputStream fsdis = fsSource.open(sta.getPath())) {
                    byte[] data = new byte[4096];
                    int read;
                    while ((read = fsdis.read(data)) != -1) {
                        // Mirror the content to the console...
                        System.out.write(data, 0, read);
                        // ...and append it to the merged output file.
                        fsdos.write(data, 0, read);
                    }
                }
            }
        }
        // Flush stdout but never close it: the original wrapped System.out in
        // a new PrintStream and closed it, silencing all later console output.
        System.out.flush();
    }

    public static void main(String[] args) throws IOException {
        // Identify as the "hadoop" user for HDFS permission checks.
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        MyMergeFile merge = new MyMergeFile(
                "hdfs://10.24.130.132:9000/user/hadoop/input/",
                "hdfs://10.24.130.132:9000/user/hadoop/input/merge.txt"
        );
        merge.doMerge();
    }
}
