package cn.dglydrpy.study.j2ee.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * HDFS API 操作
 */
/**
 * Demonstrates common HDFS Java API operations: listing a directory,
 * creating a directory, uploading a local file, renaming, printing file
 * metadata and block locations, reading file contents, and deleting.
 *
 * <p>Connects to the NameNode at {@code hdfs://node1:8020} as user "root".
 */
public class TestHDFS {
    /** Shared HDFS client handle; initialized once in {@link #main}. */
    public  static  FileSystem fs = null;

    public static void main(String[] args) throws IOException, InterruptedException {
        // Build the client configuration (picks up *-site.xml from the classpath if present).
        Configuration conf = new Configuration();

        // Open a FileSystem connection to the NameNode as user "root".
        fs = FileSystem.get(URI.create("hdfs://node1:8020"), conf, "root");

        try {
            // List the files under an HDFS path.
            listHDFSFiles("/");

            // Create a directory on HDFS.
            mkdirOnHDFS("/testdir");

            // Upload a local file to HDFS.
            writeFileToHDFS("./data/data.txt", "/testdir/data.txt");

            // Rename an HDFS file.
            renameHDFSFile("/testdir/data.txt", "/testdir/text.txt");

            // Print detailed metadata of an HDFS file.
            getHDFSFileInfos("/testdir/text.txt");

            // Read and print the contents of an HDFS file.
            readFileFromHDFS("/testdir/text.txt");

            // Delete an HDFS file or directory.
            deleteFileOrDirFromHDFS("/testdir");
        } finally {
            // Fix: release the client connection even if an operation above fails.
            fs.close();
        }
    }

    /**
     * Deletes an HDFS file or directory (recursively for directories).
     *
     * @param hdfsFileOrDirPath HDFS path of the file or directory to delete
     * @throws IOException if the HDFS call fails
     */
    private static void deleteFileOrDirFromHDFS(String hdfsFileOrDirPath) throws IOException {
        // Check existence first so a missing path is reported, not silently ignored.
        Path path = new Path(hdfsFileOrDirPath);
        if (!fs.exists(path)) {
            System.out.println("HDFS目录或者文件不存在！");
            return;
        }

        // recursive=true so non-empty directories are removed as well.
        boolean result = fs.delete(path, true);
        if (result) {
            System.out.println("HDFS目录或者文件删除成功！");
        } else {
            System.out.println("HDFS目录或者文件删除失败！");
        }
    }

    /**
     * Reads an HDFS text file line by line and prints each line to stdout.
     *
     * @param hdfsFilePath HDFS path of the file to read
     * @throws IOException if the file cannot be opened or read
     */
    private static void readFileFromHDFS(String hdfsFilePath) throws IOException {
        Path path = new Path(hdfsFilePath);
        // Fix: try-with-resources closes the streams even if readLine() throws
        // (the original leaked them on error), and an explicit charset avoids
        // platform-default decoding surprises.
        try (FSDataInputStream in = fs.open(path);
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /**
     * Prints metadata (permission, owner, size, block size, name) and block
     * locations for every file under the given path.
     *
     * @param hdfsFilePath HDFS path of a file, or a directory to scan
     * @throws IOException if the HDFS call fails
     */
    private static void getHDFSFileInfos(String hdfsFilePath) throws IOException {
        Path path = new Path(hdfsFilePath);
        // recursive=true: descends into subdirectories; the iterator yields files only.
        RemoteIterator<LocatedFileStatus> listFilesIterator = fs.listFiles(path, true);
        while (listFilesIterator.hasNext()) {
            LocatedFileStatus fileStatus = listFilesIterator.next();
            System.out.println("文件详细信息如下：");
            System.out.println("权限："+fileStatus.getPermission());
            System.out.println("所有者："+fileStatus.getOwner());
            System.out.println("大小："+fileStatus.getLen());
            System.out.println("块大小："+fileStatus.getBlockSize());
            System.out.println("文件名："+fileStatus.getPath().getName());

            // Print the location details of each data block of the file.
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                System.out.println("block信息："+blockLocation);
            }
        }
    }

    /**
     * Renames (moves) a file on HDFS.
     *
     * <p>Fix: the original printed "success" unconditionally, but
     * {@code FileSystem.rename} returns {@code false} on failure (e.g. the
     * source does not exist) instead of throwing — so the result is checked.
     *
     * @param hdfsOldFileName current HDFS path
     * @param hdfsNewFileName target HDFS path
     * @throws IOException if the HDFS call fails
     */
    private static void renameHDFSFile(String hdfsOldFileName, String hdfsNewFileName) throws IOException {
        boolean renamed = fs.rename(new Path(hdfsOldFileName), new Path(hdfsNewFileName));
        if (renamed) {
            System.out.println("重命名成功！");
        } else {
            System.out.println("重命名失败！");
        }
    }

    /**
     * Uploads a local file to HDFS, replacing any existing file at the target.
     *
     * @param localFilePath path of the local source file
     * @param hdfsFilePath  destination HDFS path
     * @throws IOException if the upload fails
     */
    private static void writeFileToHDFS(String localFilePath, String hdfsFilePath) throws IOException {
        // Remove a pre-existing target so the copy cannot collide with it
        // (recursive=true also clears a directory occupying the target path).
        Path path = new Path(hdfsFilePath);
        if (fs.exists(path)) {
            fs.delete(path, true);
        }

        // delSrc=false keeps the local file; overwrite=true replaces the target.
        fs.copyFromLocalFile(false, true, new Path(localFilePath), path);
        System.out.println("上传文件成功！");
    }

    /**
     * Creates a directory on HDFS if it does not already exist.
     *
     * @param disPath HDFS path of the directory to create
     * @throws IOException if the HDFS call fails
     */
    private static void mkdirOnHDFS(String disPath) throws IOException {
        Path path = new Path(disPath);
        if (fs.exists(path)) {
            System.out.println("当前"+disPath+"已经存在！");
            return;
        }

        // mkdirs creates any missing parent directories as well.
        boolean result = fs.mkdirs(path);
        if (result) {
            System.out.println("创建"+disPath+"成功！");
        } else {
            System.out.println("创建"+disPath+"失败！");
        }
    }

    /**
     * Recursively prints the paths of all entries under an HDFS path.
     * Directories are descended into first, then the directory path itself
     * is printed (post-order, matching the original output order).
     *
     * @param hdfsPath HDFS directory to list
     * @throws IOException if the HDFS call fails
     */
    private static void listHDFSFiles(String hdfsPath) throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(new Path(hdfsPath));
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isDirectory()) {
                listHDFSFiles(fileStatus.getPath().toString());
            }
            System.out.println(fileStatus.getPath());
        }
    }
}
