package org.apache.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class GetPartFile {

    /** URI of the HDFS NameNode all operations in this class talk to. */
    private static final String HDFS_URI = "hdfs://node-1:9000";

    /** User name used when connecting to HDFS. */
    private static final String HDFS_USER = "niit";

    /**
     * Uploads a local file or directory to HDFS and lists the contents of the
     * destination directory afterwards.
     *
     * @param srcPath local source path (e.g. {@code D:/output3})
     * @param dstPath HDFS destination path; the HDFS root is {@code /}
     * @throws IOException          if the copy or listing fails
     * @throws URISyntaxException   if the HDFS URI is malformed
     * @throws InterruptedException if connecting as the given user is interrupted
     */
    public static void uploadDirectory(String srcPath, String dstPath)
            throws IOException, URISyntaxException, InterruptedException {
        Configuration conf = new Configuration();
        // try-with-resources closes the FileSystem handle; the original leaked it.
        try (FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER)) {
            Path src = new Path(srcPath);
            Path dst = new Path(dstPath);
            fs.copyFromLocalFile(src, dst);
            // "fs.defaultFS" is the current config key; "fs.default.name" is deprecated.
            // Trailing space fixes the previously garbled "Upload tohdfs://..." output.
            System.out.println("Upload to " + conf.get("fs.defaultFS"));
            FileStatus[] files = fs.listStatus(dst);
            for (FileStatus file : files) {
                System.out.println(file.getPath());
            }
        }
    }

    /**
     * Downloads a file from HDFS to the local file system.
     *
     * @param dstPath HDFS path of the file to download
     * @param srcPath local path to write the file to
     * @throws IOException          if the copy fails
     * @throws URISyntaxException   if the HDFS URI is malformed
     * @throws InterruptedException if connecting as the given user is interrupted
     */
    public static void downloadFile(String dstPath, String srcPath)
            throws IOException, URISyntaxException, InterruptedException {
        Configuration configuration = new Configuration();
        // try-with-resources replaces the explicit fs.close(), so the handle is
        // released even if copyToLocalFile throws.
        try (FileSystem fs = FileSystem.get(new URI(HDFS_URI), configuration, HDFS_USER)) {
            /*
             * copyToLocalFile parameters:
             *   1: delete the source file after copying
             *   2: HDFS source path
             *   3: local destination path
             *   4: use the raw local file system (skip CRC files)
             */
            fs.copyToLocalFile(false, new Path(dstPath), new Path(srcPath), false);
        }
    }

    public static void main(String[] args) throws Exception {
//        uploadDirectory("D:/output3","hdfs://node-1:9000/");
        downloadFile("/output3", "output/");
    }
}
