package com.kakarota.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
//import org.mockito.*;
//import org.springframework.core.env.ConfigurableEnvironment;
//import org.springframework.core.env.StandardEnvironment;
//import org.springframework.boot.test.util.EnvironmentBuilder;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Map;
//import static org.mockito.Mockito.*;
//
//import static org.mockito.Mockito.when;

/**
 * Hadoop HDFS Java API operations, exercised as JUnit integration tests.
 *
 * <p>Each test method drives one {@code FileSystem} operation against the
 * cluster at {@link #HDFS_PATH}. These are integration tests: they require
 * a reachable HDFS NameNode and (for the upload/download tests) local files
 * on the machine running them.
 *
 * @program: hadoop-train
 * @author: 01
 * @create: 2018-03-25 13:59
 **/
//@RunWith(Parameterized.class)
public class HDFSAPP {

    // Address and port of the HDFS NameNode (the fs.defaultFS URI)
    public static final String HDFS_PATH = "hdfs://192.168.1.117:9000";
    // Handle for HDFS operations; created in setUp(), closed in tearDown()
    FileSystem fileSystem = null;
    // Hadoop configuration object
    Configuration configuration = null;

    /**
     * Creates an HDFS directory (missing parents are created as well).
     */
    @Test
    public void mkdir() throws Exception {
        // mkdirs takes a Path object and behaves like `mkdir -p`
        fileSystem.mkdirs(new Path("/hdfsapi/test"));
    }

    // Acquire resources before each test
    @Before
    public void setUp() throws Exception {

        // UNC path to a local Hadoop distribution; required by the Windows client
        System.setProperty("HADOOP_HOME", "\\\\DESKTOP-I92VUSS\\f\\hadoop\\hadoop-3.3.5");

        configuration = new Configuration();
        // Arguments: server URI, configuration object, HDFS user name
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "ASD");

        System.out.println("HDFSAPP.setUp");
    }

    // Release resources after each test
    @After
    public void tearDown() throws Exception {
        // FIX: actually close the FileSystem instead of only dropping the
        // reference, so RPC sockets and buffers are released between tests.
        if (fileSystem != null) {
            fileSystem.close();
            fileSystem = null;
        }
        configuration = null;

        System.out.println("HDFSAPP.tearDown");
    }

    /**
     * Creates a file on HDFS and writes a short string into it.
     */
    @Test
    public void create() throws Exception {
        // try-with-resources guarantees the stream is closed even on failure
        try (FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/a.txt"))) {
            // Use an explicit charset instead of the platform default
            outputStream.write("hello hadoop".getBytes(StandardCharsets.UTF_8));
            outputStream.flush();
        }
    }

    /**
     * Overwrites /hdfsapi/test/a.txt with multi-line content.
     */
    @Test
    public void addContent() throws Exception {
        // Second argument 'true' = overwrite if the file already exists
        try (FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/a.txt"), true)) {
            outputStream.write("hello hadoop\n".getBytes(StandardCharsets.UTF_8));
            String content = " hello world\n" +
                    "hadoop welcome\n" +
                    "hadoop hdfs mapreduce\n" +
                    "hadoop hdfs";
            outputStream.write(content.getBytes(StandardCharsets.UTF_8));
            outputStream.flush();
        }
    }

    /**
     * Prints the content of an HDFS file to the console.
     */
    @Test
    public void cat() throws Exception {
        // Open the file; close it even if the copy throws
        try (FSDataInputStream in = fileSystem.open(new Path("/hdfsapi/test/a.txt"))) {
            // Third argument is the copy buffer size in bytes;
            // System.out is intentionally left open.
            IOUtils.copyBytes(in, System.out, 1024);
        }
    }

    /**
     * Uploads a local file to HDFS.
     */
    @Test
    public void copyFromLocalFile() throws Exception {
        Path localPath = new Path("E:/partitioner.txt");
        Path hdfsPath = new Path("/hdfsapi/test/");
        // First argument: local source path; second: HDFS destination path
        fileSystem.copyFromLocalFile(localPath, hdfsPath);
    }

    /**
     * Uploads a large local file to HDFS while printing a dot-progress bar.
     */
    @Test
    public void copyFromLocalFileWithProgress() throws Exception {
        try (InputStream in = new BufferedInputStream(
                new FileInputStream(new File("E:/Linux Install/mysql_cluster.iso")));
             FSDataOutputStream outputStream = fileSystem.create(
                     new Path("/hdfsapi/test/mysql_cluster.iso"),
                     new Progressable() {
                         public void progress() {
                             // Invoked periodically by the client; one dot per callback
                             System.out.print(".");
                         }
                     })) {
            IOUtils.copyBytes(in, outputStream, 4096);
        }
    }

    /**
     * Downloads an HDFS file to the local file system via a stream copy.
     */
    @Test
    public void copyToLocalFile2() throws Exception {
        // Both streams are closed in reverse order by try-with-resources
        try (FSDataInputStream in = fileSystem.open(new Path("/hdfsapi/test/b.txt"));
             OutputStream outputStream = new FileOutputStream(new File("E:/b.txt"))) {
            IOUtils.copyBytes(in, outputStream, 1024);
        }
    }

    /**
     * Lists every entry directly under an HDFS directory, printing its
     * type, replication factor, size, and path.
     *
     * @throws Exception on any HDFS access failure
     */
    @Test
    public void listFiles() throws Exception {
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/hdfsapi/test/"));
        for (FileStatus fileStatus : fileStatuses) {
            System.out.println("这是一个：" + (fileStatus.isDirectory() ? "文件夹" : "文件"));
            System.out.println("副本系数：" + fileStatus.getReplication());
            System.out.println("大小：" + fileStatus.getLen());
            System.out.println("路径：" + fileStatus.getPath() + "\n");
        }
    }

}