package com.xzx.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;

/**
 * @author xinzhixuan
 * @version V1.0
 * @date 2019/7/5 22:56
 */
public class HDFSClient {
    private static final Logger logger = LoggerFactory.getLogger(HDFSClient.class);
    private static final String HDFS_URL = "hdfs://centos6-2:9000";
    private static final String HDFS_USER = "root";
    /** HDFS block size assumed by the seek-based partial downloads below (128 MB). */
    private static final long BLOCK_SIZE = 128L * 1024 * 1024;

    /**
     * Opens a FileSystem handle against the configured cluster as the configured user.
     * Caller owns the returned handle and must close it (use try-with-resources).
     */
    private static FileSystem openFileSystem(Configuration configuration)
            throws IOException, URISyntaxException, InterruptedException {
        return FileSystem.get(new URI(HDFS_URL), configuration, HDFS_USER);
    }

    /**
     * Copies at most {@code maxBytes} from the (already positioned) input stream to the output.
     * Replaces the original manual loops, which ignored the return value of read() and always
     * wrote the whole 1024-byte buffer — corrupting the output on short reads and spinning
     * forever at end of stream.
     *
     * @param in       source stream, positioned at the first byte to copy
     * @param out      destination stream
     * @param maxBytes upper bound on bytes copied; copying stops early at end of file
     */
    private static void copyRange(FSDataInputStream in, FileOutputStream out, long maxBytes)
            throws IOException {
        byte[] buffer = new byte[1024];
        long remaining = maxBytes;
        while (remaining > 0) {
            int toRead = (int) Math.min(buffer.length, remaining);
            int read = in.read(buffer, 0, toRead);
            if (read == -1) {
                break; // end of file reached before the block boundary
            }
            out.write(buffer, 0, read);
            remaining -= read;
        }
    }

    public static void main(String[] args) throws IOException, URISyntaxException, InterruptedException {
        // NOTE(review): the original targeted hdfs://centos02:8020 as user "bd", unlike every
        // other method in this class; unified on HDFS_URL/HDFS_USER — confirm the intent.
        // 1. Get the HDFS client object; try-with-resources closes it even on failure.
        try (FileSystem fileSystem = openFileSystem(new Configuration())) {
            // 2. Execute the command: create the /test directory.
            fileSystem.mkdirs(new Path("/test"));
        }
        // 3. Resources are released by try-with-resources above.
        logger.info("========over===");
    }

    /** Uploads a local file to HDFS root with a per-request replication factor of 2. */
    @Test
    public void testFileUpload() throws URISyntaxException, IOException, InterruptedException {
        Configuration configuration = new Configuration();
        configuration.set("dfs.replication", "2"); // override replication just for this client
        try (FileSystem fileSystem = openFileSystem(configuration)) {
            // delSrc=false keeps the local file; overwrite=true replaces an existing copy.
            fileSystem.copyFromLocalFile(false, true, new Path("d:/report_data2.txt"), new Path("/"));
            logger.info("=========success====");
        }
    }

    /** Downloads /wc.input to the local d:/ directory. */
    @Test
    public void testFileDownload() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fileSystem = openFileSystem(new Configuration())) {
            // delSrc=false keeps the HDFS copy; useRawLocalFileSystem=true skips the local .crc file.
            fileSystem.copyToLocalFile(false, new Path("/wc.input"), new Path("d:/"), true);
            logger.info("=========success====");
        }
    }

    /** Recursively deletes the /sanguo directory. */
    @Test
    public void testDelete() throws IOException, URISyntaxException, InterruptedException {
        try (FileSystem fileSystem = openFileSystem(new Configuration())) {
            fileSystem.delete(new Path("/sanguo"), true); // recursive=true
        }
    }

    /** Renames /wc.input to /wc2.input. */
    @Test
    public void testRename() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fileSystem = openFileSystem(new Configuration())) {
            fileSystem.rename(new Path("/wc.input"), new Path("/wc2.input"));
        }
    }

    /** Recursively lists all files under /, logging metadata and block locations. */
    @Test
    public void testListFile() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fileSystem = openFileSystem(new Configuration())) {
            RemoteIterator<LocatedFileStatus> iterator = fileSystem.listFiles(new Path("/"), true);
            while (iterator.hasNext()) {
                LocatedFileStatus fileStatus = iterator.next();
                logger.info("==file name: {}", fileStatus.getPath().getName());
                logger.info("==length: {}", fileStatus.getLen());
                logger.info("==permission: {}", fileStatus.getPermission());
                logger.info("==group: {}", fileStatus.getGroup());
                // Block storage information.
                BlockLocation[] blockLocations = fileStatus.getBlockLocations();
                for (BlockLocation blockLocation : blockLocations) {
                    // Hosts storing this block's replicas.
                    logger.info("==block hosts: {}", Arrays.toString(blockLocation.getHosts()));
                }
                logger.info("=======================================================");
            }
        }
    }

    /** Lists the direct children of /, distinguishing files from directories. */
    @Test
    public void testListStatus() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fileSystem = openFileSystem(new Configuration())) {
            FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/"));
            for (FileStatus fileStatus : fileStatuses) {
                if (fileStatus.isDirectory()) {
                    logger.info("===directory: {}", fileStatus.getPath().getName());
                } else {
                    logger.info("===file: {}", fileStatus.getPath().getName());
                }
            }
        }
    }

    /** Uploads a local file to HDFS via raw streams. */
    @Test
    public void putFileToHDFS() throws URISyntaxException, IOException, InterruptedException {
        Configuration configuration = new Configuration();
        try (FileSystem fileSystem = openFileSystem(configuration);
             FSDataOutputStream out = fileSystem.create(new Path("/hadoop-3.1.2.tar.gz"), true);
             FileInputStream in = new FileInputStream("C:\\Users\\xinzh\\Desktop/hadoop-3.1.2.tar.gz")) {
            // close=false: try-with-resources owns the streams, so avoid a double close.
            IOUtils.copyBytes(in, out, configuration, false);
        }
    }

    /** Downloads an HDFS file to the local disk via raw streams. */
    @Test
    public void getFileFromHDFS() throws URISyntaxException, IOException, InterruptedException {
        Configuration configuration = new Configuration();
        try (FileSystem fileSystem = openFileSystem(configuration);
             FSDataInputStream in = fileSystem.open(new Path("/xzx.txt"));
             FileOutputStream out = new FileOutputStream("d:/xzx.txt")) {
            // close=false: try-with-resources owns the streams, so avoid a double close.
            IOUtils.copyBytes(in, out, configuration, false);
        }
    }

    /** Downloads only the first 128 MB block of the file. */
    @Test
    public void readFileSeek1() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fileSystem = openFileSystem(new Configuration());
             FSDataInputStream in = fileSystem.open(new Path("/hadoop-3.1.2.tar.gz"));
             FileOutputStream out = new FileOutputStream("d:/hadoop-3.1.2.tar.gz.part1")) {
            // Copy exactly one block's worth of bytes, honoring short reads and EOF.
            copyRange(in, out, BLOCK_SIZE);
        }
    }

    /**
     * Downloads only the second 128 MB block of the file.
     * On Windows: {@code type hadoop-3.1.2.tar.gz.part2 >> hadoop-3.1.2.tar.gz.part1}
     * appends block two onto block one.
     */
    @Test
    public void readFileSeek2() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fileSystem = openFileSystem(new Configuration());
             FSDataInputStream in = fileSystem.open(new Path("/hadoop-3.1.2.tar.gz"));
             FileOutputStream out = new FileOutputStream("d:/hadoop-3.1.2.tar.gz.part2")) {
            // Position the input at the start of the second block, then copy one block.
            in.seek(BLOCK_SIZE);
            copyRange(in, out, BLOCK_SIZE);
        }
    }

    /** Downloads everything after the first two 128 MB blocks. */
    @Test
    public void readFileSeek3() throws URISyntaxException, IOException, InterruptedException {
        Configuration configuration = new Configuration();
        try (FileSystem fileSystem = openFileSystem(configuration);
             FSDataInputStream in = fileSystem.open(new Path("/hadoop-3.1.2.tar.gz"));
             FileOutputStream out = new FileOutputStream("d:/hadoop-3.1.2.tar.gz.part3")) {
            // Skip the first two blocks, then copy the remainder of the file.
            in.seek(BLOCK_SIZE * 2);
            // close=false: try-with-resources owns the streams, so avoid a double close.
            IOUtils.copyBytes(in, out, configuration, false);
        }
    }
}
