package com.cnzha.hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import org.mortbay.util.ajax.JSON;

/**
 * HDFS client API examples: directory creation, upload/download,
 * delete/rename, listing, and raw stream copies.
 *
 * @author zhaoxin (zdkk@foxmail.com), 2020-03-24
 */
public class HdfsClient {

    @Test
    public void testMkdirs() throws IOException {

        Configuration entries = new Configuration();
        entries.set("fs.defaultFS", "hdfs://hadoop102:9000");
        FileSystem fileSystem = FileSystem.get(entries);
        boolean mkdirs = fileSystem.mkdirs(new Path("/0228/zhaoxin"));
        fileSystem.close();
    }

    @Test
    public void testCopyFromLocalFile() throws URISyntaxException, IOException, InterruptedException {
        Configuration configuration = new Configuration();
        /**
         * 代码中设置副本数，会覆盖配置中副本的数量
         * 代码优先级 > 代码配置 > 服务器配置
         */
        configuration.set("dfs.replication", "2");
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");
        fileSystem.copyFromLocalFile(new Path("c:/lisence.txt"), new Path("/0228/dake.data"));
        fileSystem.close();
        System.out.println("over");
    }

    @Test
    public void testCopyToLocalFile() throws IOException, InterruptedException, URISyntaxException {

        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");

//        fileSystem.copyToLocalFile(new Path("/0228/zhaoxin/zx.data"), new Path("d:/zx.data"));
        /**
         * useRawLocalFileSystem = true 时不会产生crc文件
         */
        fileSystem.copyToLocalFile(true, new Path("/0228/zhaoxin/zx.data"), new Path("d:/zx.data"), true);

        fileSystem.close();
    }

    @Test
    public void testDelete() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");

        fileSystem.delete(new Path("/0228/zhaoxin/"), true);
        fileSystem.close();
    }

    @Test
    public void testRename() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");

        fileSystem.rename(new Path("/wc.input"), new Path("/0228/wc.input"));
        fileSystem.close();

    }

    @Test
    public void testListFiles() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");

        RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator = fileSystem.listFiles(new Path("/0228/"), true);
        while (locatedFileStatusRemoteIterator.hasNext()) {
            LocatedFileStatus next = locatedFileStatusRemoteIterator.next();
            System.out.println("getName() = " + next.getPath().getName());
            System.out.println("getParent() = " + next.getPath().getParent());
            System.out.println("getGroup() = " + next.getGroup());
            System.out.println("getPermission() = " + next.getPermission());

            BlockLocation[] blockLocations = next.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                System.out.println(JSON.toString(blockLocation.getHosts()));
            }

        }
        fileSystem.close();
    }


    @Test
    public void testListStatus() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");

        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                System.out.println(fileStatus.getPath() + "  文件");
            } else {
                System.out.println(fileStatus.getPath() + "  目录");
            }
        }
    }


    @Test
    public void putFileToHDFS() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();
        configuration.set("dfs.replication", "3");

        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");
        //输入流
        FileInputStream fileInputStream = new FileInputStream(new File("d:/me.jpg"));
        //输出流
        FSDataOutputStream dataOutputStream = fileSystem.create(new Path("/0228/me.jpg"));
        //流对拷
        IOUtils.copyBytes(fileInputStream, dataOutputStream, configuration);
        IOUtils.closeStream(fileInputStream);
        IOUtils.closeStream(dataOutputStream);

        fileSystem.close();
    }

    // 文件下载
    @Test
    public void getFileFromHDFS() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();

        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");
        FileOutputStream fos = new FileOutputStream(new File("d:/zhengjianzhao.jpg"));
        FSDataInputStream open = fileSystem.open(new Path("/0228/me.jpg"));
        IOUtils.copyBytes(open, fos, configuration);
        IOUtils.closeStream(fos);
        IOUtils.closeStream(open);
        fileSystem.close();
    }

    //    （1）下载第一块
    @Test
    public void readFileSeek1() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");
        FSDataInputStream dataInputStream = fileSystem.open(new Path("/0228/hadoop-2.7.2.tar.gz"));
        FileOutputStream fileOutputStream = new FileOutputStream(new File("d:/hadoop.tar.gz.part1"));
        byte[] bytes = new byte[1024];
        for (int i = 0; i < 1024 * 128; i++) {
            dataInputStream.read(bytes);
            fileOutputStream.write(bytes);
        }
        IOUtils.closeStream(fileOutputStream);
        IOUtils.closeStream(dataInputStream);
        fileSystem.close();
    }

    //    （2）下载第二块
    @Test
    public void readFileSeek2() throws IOException, InterruptedException, URISyntaxException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop102:9000"), configuration, "hadoopuser");
        FSDataInputStream dataInputStream = fileSystem.open(new Path("/0228/hadoop-2.7.2.tar.gz"));
        dataInputStream.seek(1024 * 1024 * 128);

        FileOutputStream fileOutputStream = new FileOutputStream(new File("d:/hadoop.tar.gz.part2"));
        IOUtils.copyBytes(dataInputStream,fileOutputStream,configuration);
        IOUtils.closeStream(fileOutputStream);
        IOUtils.closeStream(dataInputStream);
        fileSystem.close();
    }
}
