package com.shujia.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * @author yangjiming
 * @create 2021-04-05 19:43
 */
public class HDFSApi {
    // Shared HDFS client, opened before each test and closed after it.
    FileSystem fs;

    /**
     * Opens a connection to the HDFS NameNode before each test.
     *
     * @throws Exception if the URI is malformed or the filesystem cannot be reached
     */
    @Before
    public void init() throws Exception {
        // Load the default Hadoop configuration; explicit settings below override it.
        Configuration conf = new Configuration();
        // Request a single replica per block (suitable for a single-DataNode dev cluster).
        conf.set("dfs.replication", "1");
        // NameNode RPC address.
        URI uri = new URI("hdfs://master:9000");
        fs = FileSystem.get(uri, conf);
    }

    /**
     * Closes the HDFS connection after each test so client resources
     * (sockets, lease renewer threads) are not leaked between tests.
     */
    @After
    public void tearDown() throws Exception {
        if (fs != null) {
            fs.close();
        }
    }

    /** Creates a nested directory tree; mkdirs creates missing parents like {@code mkdir -p}. */
    @Test
    public void mkdir() throws Exception {
        Path path = new Path("/test/a/b/c");
        boolean mkdirs = fs.mkdirs(path);
        System.out.println(mkdirs);
    }

    /**
     * Deletes a path from HDFS.
     * The second argument enables recursive deletion: with {@code true} a directory
     * is removed together with its files and subdirectories; with {@code false}
     * deleting a non-empty directory fails.
     */
    @Test
    public void delete() throws Exception {
        Path path = new Path("/data/test.txt");
        boolean delete = fs.delete(path, true);
        System.out.println(delete);
    }

    /** Lists the direct children of /data, printing each entry's name, length and block size. */
    @Test
    public void list() throws Exception {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/data"));
        for (FileStatus fileStatus : fileStatuses) {
            Path path = fileStatus.getPath();
            System.out.println(path.getName());
            long len = fileStatus.getLen();
            System.out.println(len);
            long blockSize = fileStatus.getBlockSize();
            System.out.println(blockSize);
            System.out.println();
        }
    }

    /** Fetches the status of a single file and prints its name and block size. */
    @Test
    public void getFile() throws Exception {
        FileStatus fileStatus = fs.getFileStatus(new
                Path("/data/User.java"));
        String name = fileStatus.getPath().getName();
        System.out.println(name);
        long blockSize = fileStatus.getBlockSize();
        System.out.println(blockSize);
    }

    /**
     * Reads /data/test.txt line by line and prints it.
     * try-with-resources closes both the reader and the underlying stream even
     * when reading throws; the original code leaked the BufferedReader and left
     * the stream open on the exception path. UTF-8 is specified explicitly so
     * the Chinese text written by {@link #write()} decodes correctly regardless
     * of the platform default charset.
     */
    @Test
    public void read() throws Exception {
        try (FSDataInputStream open = fs.open(new Path("/data/test.txt"));
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(open, StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /**
     * Overwrites /data/test.txt with plain UTF-8 text.
     * Raw UTF-8 bytes are written instead of {@code writeUTF}: writeUTF prepends
     * a 2-byte length prefix and uses modified UTF-8, producing a binary record
     * file that {@link #read()} cannot consume as plain text.
     */
    @Test
    public void write() throws Exception {
        // second argument true = overwrite if the file already exists
        try (FSDataOutputStream fsDataOutputStream = fs.create(
                new Path("/data/test.txt"), true)) {
            fsDataOutputStream.write("hadoop".getBytes(StandardCharsets.UTF_8));
            fsDataOutputStream.write("中国".getBytes(StandardCharsets.UTF_8));
            fsDataOutputStream.write("你好".getBytes(StandardCharsets.UTF_8));
        }
    }

    /** Uploads a local file into the HDFS /data/ directory. */
    @Test
    public void put() throws Exception {
        Path localPath = new Path("E:\\BigData_Learn\\IdeaProjects\\BigData\\Hadoop\\data\\test.txt");
        Path hdfsPath = new Path("/data/");
        fs.copyFromLocalFile(localPath, hdfsPath);
    }

    /**
     * Downloads an HDFS file to the local filesystem.
     * Arguments: delSrc=false (keep the HDFS copy),
     * useRawLocalFileSystem=true (skip writing the local .crc checksum file).
     */
    @Test
    public void get() throws Exception {
        Path localPath = new Path("Hadoop/data/student.txt");
        Path hdfsPath = new Path("/data/student.txt");
        fs.copyToLocalFile(false, hdfsPath, localPath, true);
    }

}
