package com.yuanzheng;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.FileInputStream;
import java.io.IOException;

/**
 * @author yuanzheng
 * @date 2020/4/22-13:34
 */
public class HdfsDemo {

    private FileSystem fileSystem;

    /**
     * Builds the HDFS client before each test.
     * <p>
     * Sets HADOOP_USER_NAME so the remote NameNode treats us as "root"
     * (works around HDFS permission checks without disabling them server-side).
     */
    @Before
    public void before() throws IOException {
        System.setProperty("HADOOP_USER_NAME", "root");
        /* Alternatively, HDFS permission checking can be disabled on the cluster. */
        Configuration configuration = new Configuration();
        // Single-node cluster: one replica per block is enough.
        configuration.set("dfs.replication", "1");
        // fs.defaultFS must be a full URI including the scheme; without
        // "hdfs://" the value is not a valid filesystem URI and the client
        // will not connect to the remote NameNode.
        configuration.set("fs.defaultFS", "hdfs://101.201.69.99:8020");
        this.fileSystem = FileSystem.get(configuration);
    }

    /**
     * Lists the entries under /home/hadoop/ and prints each one's block size.
     */
    @Test
    public void test() throws IOException {
        Path path = new Path("/home/hadoop/");
        FileStatus[] fileStatuses = fileSystem.listStatus(path);
        for (FileStatus fileStatus : fileStatuses) {
            long blockSize = fileStatus.getBlockSize();
            System.out.println(blockSize);
        }
    }

    /**
     * Uploads a local file to HDFS as /evaluate.log.
     * <p>
     * The input stream is opened in try-with-resources so it cannot leak if
     * {@code fileSystem.create} throws; {@code IOUtils.copyBytes(..., true)}
     * closes both streams on the normal path.
     */
    @Test
    public void upload() throws IOException {
        try (FileInputStream fileInputStream =
                new FileInputStream("C:\\Users\\dell\\Desktop\\evaluate.txt")) {
            FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/evaluate.log"));
            IOUtils.copyBytes(fileInputStream, fsDataOutputStream, 1024, true);
        }
    }

    /**
     * Downloads /home/hadoop/yuan.log from HDFS to the local desktop.
     * <p>
     * delSrc=false keeps the source on HDFS; useRawLocalFileSystem=true
     * skips writing a local .crc checksum file.
     */
    @Test
    public void downLoad() throws IOException {
        Path source = new Path("/home/hadoop/yuan.log");
        Path dst = new Path("C:\\Users\\dell\\Desktop\\");
        fileSystem.copyToLocalFile(false, source, dst, true);
    }

    /** Releases the HDFS client after each test. */
    @After
    public void after() throws IOException {
        fileSystem.close();
    }
}
