package com.devilvan.hadoop;


import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;

public class HdfsDemo {
    private static final Logger logger = LoggerFactory.getLogger(HdfsDemo.class);

    /**
     * Lists the entries of a local directory via Hadoop's FileSystem abstraction.
     */
    @Test
    public void getListStatus() {
        // FIX: the original leaked an unused HDFS FileSystem from getFileSystem() and
        // built the local FS from the malformed, scheme-less URI "localhost".
        // FileSystem.getLocal() is the supported way to get the local file system.
        try (FileSystem localFileSystem = FileSystem.getLocal(new Configuration())) {
//            FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/HdfsPermission"));
            FileStatus[] fileStatuses = localFileSystem.listStatus(new Path("D:\\scala"));
            Arrays.stream(fileStatuses).forEach(System.out::println);
        } catch (IOException e) {
            logger.error("getListStatus failed", e);
        }
    }

    /**
     * Downloads a file from HDFS using raw streams.
     * Key call: fileSystem.open() creates the HDFS input stream.
     */
    @Test
    public void downloadFileWithIO() {
        logger.info("==================IO形式的下载文件开始==================");
        // try-with-resources closes the streams and the FileSystem in reverse
        // declaration order even when the copy throws (the original leaked them
        // on any exception).
        try (FileSystem fileSystem = getFileSystem();
             FSDataInputStream inputStream = fileSystem.open(new Path("/HdfsPermission/那坡县总表.xlsx"));
             FileOutputStream outputStream = new FileOutputStream(new File("D:\\那坡县总表.xlsx"))) {
            IOUtils.copy(inputStream, outputStream);
            logger.info("==================IO形式的下载文件结束==================");
        } catch (IOException e) {
            logger.error("downloadFileWithIO failed", e);
        }
    }


    /**
     * Uploads a local file to HDFS using raw streams.
     * Key call: fileSystem.create() creates the HDFS output stream.
     */
    @Test
    public void uploadFileWithIO() {
        logger.info("==================IO流形式的上传文件开始==================");
        // FIX: the original closed the FileSystem BEFORE the FSDataOutputStream
        // built on it, which can lose buffered data. try-with-resources closes
        // in reverse declaration order: output stream, input stream, FileSystem.
        try (FileSystem fileSystem = getFileSystem();
             FileInputStream fileInputStream = new FileInputStream(new File("G:\\那坡县总表.xlsx"));
             FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/HdfsPermission/那坡县总表.xlsx"))) {
            IOUtils.copy(fileInputStream, fsDataOutputStream);
            logger.info("==================IO流形式的上传文件结束==================");
        } catch (IOException e) {
            logger.error("uploadFileWithIO failed", e);
        }
    }

    /**
     * Merges a directory of small local files into a single HDFS file.
     */
    @Test
    public void mergeFiles() {
        logger.info("==================合并本地小文件开始==================");
        try (FileSystem fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), new Configuration(), "Devilvan");
             // Target file on HDFS that receives the concatenated contents.
             FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/HdfsPermission/mergeResult.txt"));
             LocalFileSystem localFileSystem = FileSystem.getLocal(new Configuration())) {
            FileStatus[] fileStatuses = localFileSystem.listStatus(new Path("D:\\BaiduNetdiskDownload\\开课吧\\开课吧大数据-Hadoop\\1、HDFS\\3、数据\\smallfile"));
            for (FileStatus fileStatus : fileStatuses) {
                // Open each local small file and append its bytes to the merge target.
                try (FSDataInputStream fsDataInputStream = localFileSystem.open(fileStatus.getPath())) {
                    IOUtils.copy(fsDataInputStream, fsDataOutputStream);
                }
            }
            logger.info("==================合并本地小文件结束==================");
        } catch (IOException | URISyntaxException e) {
            logger.error("mergeFiles failed", e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error("mergeFiles interrupted", e);
        }
    }

    /**
     * Downloads a file from HDFS via the high-level copyToLocalFile API.
     */
    @Test
    public void downloadFile() {
        logger.info("==================下载文件开始==================");
        try (FileSystem fileSystem = getFileSystem()) {
            // args: delSrc=false (keep the HDFS copy), useRawLocalFileSystem=true
            // (skip the local .crc checksum side-file).
            fileSystem.copyToLocalFile(
                    false,
                    new Path("/HdfsPermission/mergeResult.txt"),
                    new Path("E:\\HdfsDownload\\mergeResult.txt"),
                    true);
            logger.info("==================下载文件结束==================");
        } catch (IOException e) {
            logger.error("downloadFile failed", e);
        }
    }

    /**
     * Uploads several local files to HDFS via the high-level copyFromLocalFile API.
     */
    @Test
    public void uploadFile() {
        logger.info("==================上传文件开始==================");
        // FIX: the original never closed the FileSystem in this method.
        try (FileSystem fileSystem = getFileSystem()) {
            Path[] paths = {
                    new Path("E:\\实训需要\\apache-maven-3.6.3\\conf\\logging\\simplelogger.properties"),
                    new Path("E:\\实训需要\\apache-maven-3.6.3\\conf\\settings.xml"),
                    new Path("E:\\实训需要\\apache-maven-3.6.3\\conf\\toolchains.xml")
            };
            // args: delSrc=false (keep local originals), overwrite=true.
            fileSystem.copyFromLocalFile(
                    false, true, paths, new Path("/HdfsPermission"));
            logger.info("==================上传文件结束==================");
        } catch (IOException e) {
            logger.error("uploadFile failed", e);
        }
    }

    /**
     * Creates an empty file on HDFS.
     * Shell equivalent: hdfs dfs -touchz &lt;path&gt;/&lt;filename&gt;
     */
    @Test
    public void createFile() {
        logger.info("==================创建文件开始==================");
        // FIX: the original discarded the stream returned by create(), leaking it;
        // the file is only reliably materialized once the stream is closed.
        try (FileSystem fileSystem = getFileSystem();
             FSDataOutputStream out = fileSystem.create(new Path("/HdfsDemo/theFile.txt"))) {
            logger.info("==================创建文件结束==================");
        } catch (IOException e) {
            logger.error("createFile failed", e);
        }
    }

    /**
     * Prints block-level details (replica addresses, hosts, lengths) for every
     * file under /HdfsPermission, recursively.
     */
    @Test
    public void getFileDetail() {
        logger.info("==================获取文件详情开始==================");
        // FIX: the original never closed the FileSystem and carried an empty finally.
        try (FileSystem fileSystem = getFileSystem()) {
            RemoteIterator<LocatedFileStatus> listFiles =
                    fileSystem.listFiles(new Path("/HdfsPermission"), true);
            while (listFiles.hasNext()) {
                LocatedFileStatus fileStatus = listFiles.next();
                System.out.println("文件名：" + fileStatus.getPath().getName());
                for (BlockLocation blockLocation : fileStatus.getBlockLocations()) {
                    // Replica addresses as IP:port, e.g. 192.168.92.100:9866.
                    for (String name : blockLocation.getNames()) {
                        System.out.println("blockLocation name: " + name);
                    }
                    // Replica host names (resolved aliases), e.g. node01.devilvan.com.
                    for (String host : blockLocation.getHosts()) {
                        System.out.println("blockLocation host: " + host);
                    }
                    // Block length in bytes.
                    System.out.println("blockLocation length: " + blockLocation.getLength());
                }
                System.out.println("\n");
            }
            logger.info("==================获取文件详情结束==================");
        } catch (IOException e) {
            logger.error("getFileDetail failed", e);
        }
    }


    /**
     * Renames a file on HDFS.
     * When the two paths have different parents this behaves like a move.
     */
    @Test
    public void renameFile() {
        logger.info("==================重命名文件开始==================");
        try (FileSystem fileSystem = getFileSystem()) {
            fileSystem.rename(new Path("/HdfsDemo/mergeResult.txt"), new Path("/HdfsPermission/mergeResult.txt"));
            logger.info("==================重命名文件结束==================");
        } catch (IOException e) {
            logger.error("renameFile failed", e);
        }
    }

    /**
     * Recursively deletes a directory and everything under it on HDFS.
     */
    @Test
    public void removeDirOnHDFS() {
        logger.info("==================删除目录开始==================");
        try (FileSystem fileSystem = getFileSystem()) {
            // recursive=true: delete the directory together with its contents.
            fileSystem.delete(new Path("/HdfsDemo"), true);
            logger.info("==================删除目录结束==================");
        } catch (IOException e) {
            logger.error("removeDirOnHDFS failed", e);
        }
    }

    /**
     * Creates a directory on HDFS with an explicit permission set.
     */
    @Test
    public void mkdirWithPermission() {
        logger.info("==================创建目录（带权限）开始==================");
        try (FileSystem fileSystem = getFileSystem()) {
            // Permissions: owner read, group read, other read (r--r--r--).
            FsPermission fsPermission = new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ);
            fileSystem.mkdirs(new Path("/HdfsPermission"), fsPermission);
            logger.info("==================创建目录（带权限）结束==================");
        } catch (IOException e) {
            logger.error("mkdirWithPermission failed", e);
        }
    }


    /**
     * Creates a directory on HDFS with default permissions.
     */
    @Test
    public void mkdirOnHDFS() {
        logger.info("==================创建目录开始==================");
        try (FileSystem fileSystem = getFileSystem()) {
            fileSystem.mkdirs(new Path("/HdfsDemo"));
            logger.info("==================创建目录结束==================");
        } catch (IOException e) {
            logger.error("mkdirOnHDFS failed", e);
        }
    }

    /**
     * Builds a FileSystem handle for the hdfs://node01:8020 cluster as user
     * "Devilvan". Returns {@code null} when the connection cannot be created —
     * callers should treat a null result as a setup failure.
     */
    public static FileSystem getFileSystem() {
        FileSystem fileSystem = null;
        try {
            Configuration configuration = new Configuration();
            /*configuration.set("fs.defaultFS", "hdfs://192.168.92.100:8020");
            FileSystem fileSystem = FileSystem.get(configuration);*/
            fileSystem = FileSystem.get(new URI("hdfs://node01:8020"), configuration, "Devilvan");
        } catch (IOException | URISyntaxException e) {
            logger.error("getFileSystem failed", e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error("getFileSystem interrupted", e);
        }
        return fileSystem;
    }


}
