package com.testhadoop.demo;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;

/**
 * Demonstrates merging many small local files into a single HDFS
 * {@link SequenceFile} (relative path as key, raw bytes as value) and
 * extracting a single entry back to the local disk by its key.
 */
public class SequenceFileDemo {
    private Configuration conf;
    private FileSystem fs;

    /**
     * Builds the HDFS client configuration and opens the FileSystem handle
     * before each test.
     */
    @Before
    public void init() throws Exception {
        conf = new Configuration();
        // Mirrors the fs.defaultFS entry of hadoop-3.1.3/etc/hadoop/core-site.xml;
        // only the NameNode IP needs to change per environment.
        conf.set("fs.defaultFS", "hdfs://192.168.1.129:9000");
        // Act as user "root" (any user valid on the Hadoop cluster works).
        System.setProperty("HADOOP_USER_NAME", "root");
        fs = FileSystem.get(conf);
    }

    /** Releases the HDFS client after each test. */
    @After
    public void close() throws IOException {
        fs.close();
    }

    /** Download test: fetches one entry of the merged file by its key. */
    @Test
    public void test1() throws IOException {
        downOneMergeFile(new Path("/demo2/file"), "E:\\temp\\test8", "test1\\file2\\特色t.txt");
    }

    /** Upload test: merges a local directory tree (nested dirs supported). */
    @Test
    public void test2() throws Exception {
        mergeFile(new Path("/demo2/file"), "E:\\temp\\test1");
    }

    /**
     * Downloads the entry stored under {@code keyData} from the SequenceFile,
     * recreating its relative directory structure under {@code localPath}.
     *
     * @param path      HDFS path of the SequenceFile
     * @param localPath local directory to download into
     * @param keyData   key (relative path) of the wanted entry
     * @throws IOException if the SequenceFile cannot be read or the local
     *                     file cannot be written
     */
    public void downOneMergeFile(Path path, String localPath, String keyData) throws IOException {
        // try-with-resources: the original never closed the reader (leak).
        try (SequenceFile.Reader reader =
                     new SequenceFile.Reader(conf, SequenceFile.Reader.file(path))) {
            Text key = new Text();
            BytesWritable value = new BytesWritable();
            while (reader.next(key, value)) {
                System.out.println(key);
                if (!key.toString().equals(keyData)) {
                    continue;
                }
                byte[] bytes = value.copyBytes();
                String md5 = DigestUtils.md5Hex(bytes);
                String content = new String(bytes, StandardCharsets.UTF_8);
                // The key is a relative path, so parent directories may not
                // exist locally yet; getParentFile() replaces the original
                // split()-based parent reconstruction (and its doubled "\\").
                File target = new File(localPath + "\\" + key);
                File parent = target.getParentFile();
                if (parent != null && !parent.exists() && !parent.mkdirs()) {
                    throw new IOException("could not create directory " + parent);
                }
                // Close the stream even if write() throws (original leaked it then).
                try (FileOutputStream out = new FileOutputStream(target)) {
                    out.write(bytes);
                }
                System.out.println("读取到文件：" + key + ",md5:" + md5 + ",content:" + content);
            }
        }
    }

    /**
     * Merges every file under {@code localPath} into one SequenceFile on HDFS.
     * Each file's path relative to {@code localPath}'s parent becomes the key;
     * the raw file bytes become the value.
     *
     * @param path      destination HDFS path of the SequenceFile
     * @param localPath local directory whose contents are merged
     * @throws Exception if reading a local file or writing to HDFS fails
     */
    public void mergeFile(Path path, String localPath) throws Exception {
        SequenceFile.Writer.Option outFile = SequenceFile.Writer.file(path);
        SequenceFile.Writer.Option keyClass = SequenceFile.Writer.keyClass(Text.class);
        SequenceFile.Writer.Option valueClass = SequenceFile.Writer.valueClass(BytesWritable.class);
        List<String> localSmallPaths = getLocalPath(localPath, new ArrayList<String>());
        List<String> relativePaths = cutPath(localPath, localSmallPaths);
        Text key = new Text();
        // try-with-resources guarantees the writer is closed even on failure.
        try (SequenceFile.Writer writer =
                     SequenceFile.createWriter(conf, outFile, keyClass, valueClass)) {
            for (int i = 0; i < localSmallPaths.size(); i++) {
                File smallFile = new File(localSmallPaths.get(i));
                // Files.readAllBytes reads the whole file and closes the
                // stream; the original's single InputStream.read() call could
                // return fewer bytes than the file length and leaked the stream.
                byte[] fileContent = Files.readAllBytes(smallFile.toPath());
                String relativePath = relativePaths.get(i);
                String md5Str = DigestUtils.md5Hex(fileContent);
                System.out.println("merge小文件：" + relativePath + ",md5:" + md5Str);
                key.set(relativePath);
                // Relative path as the key, raw file bytes as the value.
                writer.append(key, new BytesWritable(fileContent));
            }
            writer.hflush();
        }
    }

    /**
     * Recursively collects the absolute paths of all regular files under
     * {@code path} into {@code paths}.
     *
     * @param path  local directory to scan
     * @param paths accumulator for the discovered file paths
     * @return the same {@code paths} list, for convenient chaining
     */
    public List<String> getLocalPath(String path, List<String> paths) {
        File[] children = new File(path).listFiles();
        // listFiles() returns null for a non-directory or an unreadable path;
        // the original dereferenced it unconditionally and could NPE.
        if (children == null) {
            return paths;
        }
        for (File child : children) {
            if (child.isDirectory()) {
                getLocalPath(child.toString(), paths);
            } else {
                paths.add(child.toString());
            }
        }
        return paths;
    }

    /**
     * Rewrites each absolute path so it becomes relative, keeping the last
     * component of {@code localPath}. E.g. with localPath {@code E:\temp\test3},
     * the file {@code E:\temp\test3\a.txt} becomes {@code test3\a.txt}.
     *
     * @param localPath       root local directory that was scanned
     * @param smallLocalPaths absolute paths of the files under that root
     * @return the relative paths, in the same order
     */
    public List<String> cutPath(String localPath, List<String> smallLocalPaths) {
        String[] parts = localPath.split("\\\\");
        // Cut everything before the last path component of localPath.
        int cutLen = localPath.length() - parts[parts.length - 1].length();
        List<String> relative = new ArrayList<>(smallLocalPaths.size());
        for (String absolute : smallLocalPaths) {
            relative.add(absolute.substring(cutLen));
        }
        return relative;
    }

}
