package cn.tedu.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.*;
import java.net.URI;

/**
 * Demonstrates basic HDFS operations (upload, download, delete) against a
 * remote cluster using the Hadoop {@link FileSystem} API.
 *
 * <p>Each test connects as user {@code root} to the NameNode at
 * {@code hdfs://47.95.28.19:9000} and releases the connection when done.
 */
public class HDFSDemo {

    /** NameNode URI of the test cluster. */
    private static final String HDFS_URI = "hdfs://47.95.28.19:9000";
    /** User to impersonate when talking to HDFS. */
    private static final String HDFS_USER = "root";

    /**
     * Uploads a local file to HDFS.
     *
     * <p>Bug fix: the original opened both streams but never transferred any
     * bytes, so the HDFS file was always created empty. The copy is now done
     * with {@link IOUtils#copyBytes}.
     *
     * @throws IOException          on connection or I/O failure
     * @throws InterruptedException if the connection attempt is interrupted
     */
    @Test
    public void put() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URI), conf, HDFS_USER);
        try (InputStream in = new FileInputStream("e://alipay-sdk-java-4.9.9.jar");
             OutputStream out = fs.create(new Path("/alipay-sdk-java-4.9.9.jar"))) {
            // close=false: the try-with-resources block owns stream lifecycle.
            IOUtils.copyBytes(in, out, conf, false);
        } finally {
            fs.close();
        }
    }

    /**
     * Downloads a file from HDFS to the local file system.
     *
     * <p>The original copied the stream manually and then called
     * {@code IOUtils.copyBytes} again on the already-exhausted stream; the
     * redundant second copy is removed and all resources are closed via
     * try-with-resources.
     *
     * @throws IOException          on connection or I/O failure
     * @throws InterruptedException if the connection attempt is interrupted
     */
    @Test
    public void get() throws IOException, InterruptedException {
        // Build the environment configuration and connect to the cluster.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URI), conf, HDFS_USER);
        try (InputStream in = fs.open(new Path("/a.txt"));
             OutputStream out = new FileOutputStream("e:/b.txt")) {
            // close=false: the try-with-resources block owns stream lifecycle.
            IOUtils.copyBytes(in, out, conf, false);
        } finally {
            fs.close();
        }
    }

    /**
     * Deletes a file from HDFS.
     *
     * @throws IOException          on connection or I/O failure
     * @throws InterruptedException if the connection attempt is interrupted
     */
    @Test
    public void delete() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(HDFS_URI), conf, HDFS_USER);
        try {
            // Second argument enables recursive delete; harmless for a plain file.
            fs.delete(new Path("/a.txt"), true);
        } finally {
            fs.close();
        }
    }
}
