package com.shujia.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * Demonstrates the HDFS Java API: directory creation/deletion, listing,
 * file status and block-location queries, and streaming reads/writes,
 * plus upload/download between the local filesystem and HDFS.
 *
 * <p>Each {@code @Test} method talks to a live cluster at
 * {@code hdfs://master:9000}; these are integration demos, not unit tests.
 */
public class Demo1HdfsJavaApi {

    FileSystem fileSystem;

    @Before
    public void init() throws Exception {

        // Hadoop configuration object.
        Configuration configuration = new Configuration();

        // Replication factor for files written by these tests (HDFS default is 3).
        configuration.set("dfs.replication", "1");

        // Address of the HDFS NameNode to connect to.
        URI uri = new URI("hdfs://master:9000");

        // FileSystem handle through which all HDFS operations are performed.
        fileSystem = FileSystem.get(uri, configuration);
    }

    @After
    public void cleanup() throws IOException {
        // Release the HDFS connection after each test; init() re-acquires it.
        if (fileSystem != null) {
            fileSystem.close();
        }
    }

    /** Prints one entry's name, length, block size, and permissions, tab-separated. */
    private void printStatus(FileStatus fileStatus) {
        Path path = fileStatus.getPath();
        long len = fileStatus.getLen();
        long blockSize = fileStatus.getBlockSize();
        FsPermission permission = fileStatus.getPermission();

        System.out.println(path.getName() + "\t" + len + "\t" + blockSize + "\t" + permission.toString());
    }

    @Test
    public void mkdir() {
        try {
            // Create a directory, like `hdfs dfs -mkdir`.
            fileSystem.mkdirs(new Path("/hadoop"));
        } catch (IOException e) {
            // Rethrow so the test fails on error (the original printStackTrace()
            // swallowed the failure and let the test pass).
            throw new RuntimeException(e);
        }
    }

    @Test
    public void delete() {
        try {
            // Delete a file or directory; `true` deletes directories recursively.
            fileSystem.delete(new Path("/java"), true);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void listStatus() {
        try {
            // List the immediate children of a directory, like `hdfs dfs -ls`.
            FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/data/"));

            // Print one line per entry.
            for (FileStatus fileStatus : fileStatuses) {
                printStatus(fileStatus);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void getFileStatus() {
        try {
            // Fetch the metadata of a single file or directory.
            FileStatus fileStatus = fileSystem.getFileStatus(new Path("/data/student/students.txt"));

            printStatus(fileStatus);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void getFileBlockLocations() {
        try {
            // Query which blocks cover the byte range [0, 10) of the file,
            // and where their replicas live.
            BlockLocation[] locations = fileSystem.getFileBlockLocations(new Path("/data/student/students.txt"), 0, 10);

            for (BlockLocation location : locations) {
                // Hostnames of the DataNodes holding a replica of this block.
                for (String host : location.getHosts()) {
                    System.out.println(host);
                }

                // host:port names of those DataNodes.
                for (String name : location.getNames()) {
                    System.out.println(name);
                }

                // Length in bytes of this block.
                System.out.println(location.getLength());
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void open() {
        // Open an HDFS file and print it line by line, like `hdfs dfs -cat`.
        // try-with-resources closes both streams even if reading throws
        // (the original closed only the raw stream, and not on exceptions),
        // and the charset is pinned to UTF-8 instead of the platform default.
        try (FSDataInputStream in = fileSystem.open(new Path("/data/student/students.txt"));
             BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {

            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }

        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void creat() {
        // Create a new file (failing if it exists, since overwrite=false)
        // and write plain text into it.
        try (FSDataOutputStream out = fileSystem.create(new Path("/java/java.txt"), false)) {

            // writeUTF would prepend a 2-byte length header to every string
            // (DataOutput modified-UTF-8 wire format), corrupting the text
            // file; write raw UTF-8 bytes instead.
            out.write("hadoop\njava".getBytes(StandardCharsets.UTF_8));

        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void copyFromLocalFile() {

        // Local source file (relative to the working directory).
        Path localFile = new Path("data/students.txt");

        // Destination directory on HDFS.
        Path hdfsFile = new Path("/hadoop");

        try {
            // Upload the local file to HDFS, like the `put` command.
            fileSystem.copyFromLocalFile(localFile, hdfsFile);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void copyToLocalFile() {

        // Local destination path.
        Path localFile = new Path("data/students_tmp.txt");

        // Source file on HDFS.
        Path hdfsFile = new Path("/hadoop/students.txt");

        try {
            // Download the HDFS file to the local filesystem, like `get`.
            // delSrc=false keeps the HDFS copy; useRawLocalFileSystem=true
            // skips writing a local .crc checksum file.
            fileSystem.copyToLocalFile(false, hdfsFile, localFile, true);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
