package com.dzx;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.io.IOUtils;

import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URI;

/**
 * Uses a custom {@code PathFilter} to select the files under the
 * {@code /user/hadoop} directory whose names end in {@code .abc}
 * and merge them into a single file, {@code merge.txt}.
 *
 * @author DuanZhaoXu
 * @date 2018-12-07 12:28:22
 */

/**
 * A {@link PathFilter} that accepts a path when its full string form
 * (scheme, authority and path included) matches a regular expression.
 */
class MyPathFilter implements PathFilter {

    /** Regex matched against {@code path.toString()}; kept public for compatibility. */
    public String reg;

    /**
     * @param reg the regular expression the whole path string must match
     */
    public MyPathFilter(String reg) {
        this.reg = reg;
    }

    /**
     * @param path candidate path from a directory listing
     * @return {@code true} iff the path's string form matches {@code reg}
     */
    @Override
    public boolean accept(Path path) {
        // Return the match result directly instead of if/return true/return false.
        return path.toString().matches(this.reg);
    }
}

/**
 * Merges every {@code .abc} file found directly under {@code inputPath} on HDFS
 * into the single file {@code outputPath}, echoing the copied bytes to stdout.
 */
public class Merge {

    /** HDFS directory that is scanned for {@code .abc} files. */
    Path inputPath;
    /** HDFS file the matching files are concatenated into. */
    Path outputPath;

    /**
     * @param inputPath  HDFS directory to scan
     * @param outputPath HDFS file to create with the merged contents
     */
    public Merge(String inputPath, String outputPath) {
        this.inputPath = new Path(inputPath);
        this.outputPath = new Path(outputPath);
    }

    /**
     * Lists the {@code .abc} files under {@code inputPath} and streams their
     * bytes, one file after another, into a freshly created {@code outputPath}.
     *
     * @throws Exception on any HDFS connection or I/O failure
     */
    public void merge() throws Exception {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://192.168.42.85:8020");
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        FileSystem fsSource = FileSystem.get(URI.create(inputPath.toString()), configuration, "root");
        FileSystem fsDst = FileSystem.get(URI.create(outputPath.toString()), configuration, "root");

        // Only files whose full path string ends in ".abc" are merged.
        FileStatus[] fileStatuses = fsSource.listStatus(inputPath, new MyPathFilter(".*\\.abc"));

        // try-with-resources guarantees the streams are closed even when an
        // I/O error occurs mid-copy (the original leaked them on exceptions).
        try (FSDataOutputStream fsdos = fsDst.create(outputPath)) {
            byte[] buffer = new byte[1024];
            for (FileStatus fileStatus : fileStatuses) {
                try (FSDataInputStream in = fsSource.open(fileStatus.getPath())) {
                    int read;
                    while ((read = in.read(buffer)) > 0) {
                        // Echo to the console for debugging. Write to System.out
                        // directly — wrapping it in a PrintStream and closing that
                        // wrapper (as before) closes System.out itself, silencing
                        // all console output after the first file.
                        System.out.write(buffer, 0, read);
                        fsdos.write(buffer, 0, read);
                    }
                }
            }
            System.out.flush();
        }
    }

    /**
     * Demo driver: resets {@code /user/hadoop}, uploads two {@code .log} and two
     * {@code .abc} files, merges the {@code .abc} ones into {@code merge.txt},
     * and downloads the result to the local desktop.
     */
    public static void main(String[] args) throws Exception {

        FileSystemUtil.delete("/user/hadoop");
        FileSystemUtil.copyFromLocal("C:\\Users\\DuanZhaoXu\\Desktop\\1.log", "/user/hadoop/1.log");
        FileSystemUtil.copyFromLocal("C:\\Users\\DuanZhaoXu\\Desktop\\2.log", "/user/hadoop/2.log");
        FileSystemUtil.copyFromLocal("C:\\Users\\DuanZhaoXu\\Desktop\\3.abc", "/user/hadoop/3.abc");
        FileSystemUtil.copyFromLocal("C:\\Users\\DuanZhaoXu\\Desktop\\4.abc", "/user/hadoop/4.abc");

        Merge merge = new Merge("/user/hadoop/", "/user/hadoop/merge.txt");
        merge.merge();
        // Fixed: the original used quadruple backslashes here, yielding a literal
        // "C:\\Users\\..." path inconsistent with the upload paths above.
        FileSystemUtil.copyToLocal("/user/hadoop/merge.txt", "C:\\Users\\DuanZhaoXu\\Desktop\\m.txt");
    }
}
