package demo;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;

/**
 * Demo of Hadoop {@link SequenceFile} usage: packing many small local files
 * into one HDFS SequenceFile (relative path as key, raw bytes as value) and
 * restoring them, plus plain local split/merge helpers.
 *
 * <p>Requires a reachable HDFS NameNode at {@code hdfs://localhost:9001};
 * the local-path tests assume a Windows file system (E:\temp\...).
 */
public class SequenceFileDemo {

    private Configuration conf;
    private FileSystem fs;

    /** Sample keys written by {@link #write()} and read back by {@link #read()}. */
    private static final String[] DATA = {"a", "b", "c", "d"};

    @Before
    public void init() throws IOException {
        // Build a configuration object and point it at the HDFS NameNode.
        conf = new Configuration();
        // "fs.defaultFS" is the current key; "fs.default.name" is deprecated.
        conf.set("fs.defaultFS", "hdfs://localhost:9001");
        // Act as user "root" (any user known to the Hadoop cluster works).
        System.setProperty("HADOOP_USER_NAME", "root");
        // Obtain the HDFS client via FileSystem's static factory.
        fs = FileSystem.get(conf);
    }

    @After
    public void after() throws IOException {
        fs.close();
    }

    /**
     * Writes the DATA keys with their indices as values into a SequenceFile.
     *
     * @throws IOException on HDFS failure
     */
    @Test
    public void write() throws IOException {
        Path path = new Path("/temp/text/1");
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path, Text.class, IntWritable.class);
        try {
            Text key = new Text();
            IntWritable value = new IntWritable();
            for (int i = 0; i < DATA.length; i++) {
                key.set(DATA[i]);
                value.set(i);
                // getLength() reports the file offset at which this record starts.
                System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
                writer.append(key, value);
            }
        } finally {
            // Always release the writer, even if append() throws.
            IOUtils.closeStream(writer);
        }
    }

    /**
     * Reads the SequenceFile written by {@link #write()} and prints every record.
     *
     * @throws IOException on HDFS failure
     */
    @Test
    public void read() throws IOException {
        Path path = new Path("/temp/text/1");
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        try {
            // Instantiate key/value holders from the types recorded in the file header.
            Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
            Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
            while (reader.next(key, value)) {
                System.out.printf("%s\t%s\n", key, value);
            }
        } finally {
            IOUtils.closeStream(reader);
        }
    }

    /**
     * Splits a local file into fixed-size chunks ("block upload" simulation).
     * Each chunk is written to E:\temp\down\&lt;name&gt;&lt;count&gt;.
     */
    @Test
    public void testBlockUpload() throws IOException {
        String path = "E:\\temp\\music.mp4";
        File file = new File(path);
        // Base name without the extension, e.g. "music" from "music.mp4".
        String name = file.getName().split("\\.")[0];
        int len = 1024 * 1024 * 8;  // 8 MB chunk size
        byte[] bytes = new byte[len];
        String baseName = "E:\\temp\\down\\";
        int count = 1;
        try (FileInputStream fileInputStream = new FileInputStream(file)) {
            int readData;
            while ((readData = fileInputStream.read(bytes)) != -1) {
                String newPath = baseName + name + count;
                // Close each chunk's stream before opening the next (the original
                // leaked every stream except the last one).
                try (FileOutputStream fileOutputStream = new FileOutputStream(newPath)) {
                    fileOutputStream.write(bytes, 0, readData);
                }
                count++;
            }
        }
    }

    /**
     * Merges the chunks produced by {@link #testBlockUpload()} back into one file.
     */
    @Test
    public void testMerge() throws Exception {
        File dir = new File("E:\\temp\\down");
        File[] files = dir.listFiles();
        // listFiles() returns null for a missing/unreadable directory.
        if (files == null) {
            throw new IOException("Not a readable directory: " + dir);
        }
        try (FileOutputStream fileOutputStream = new FileOutputStream("E:\\temp\\music\\music.mp4")) {
            byte[] bytes = new byte[1024];
            for (File part : files) {
                // Close each chunk's input stream as soon as it is consumed.
                try (FileInputStream fileInputStream = new FileInputStream(part)) {
                    int readData;
                    while ((readData = fileInputStream.read(bytes)) != -1) {
                        fileOutputStream.write(bytes, 0, readData);
                    }
                }
            }
        }
    }


    /**
     * Download a single file from the merged SequenceFile by its key.
     */
    @Test
    public void test1() throws IOException {
        downOneMergeFile(new Path("/demo/file"), "E:\\temp\\test2", "test1\\file1\\test3.txt");
    }

    /**
     * Merge a local directory tree (including nested directories) into HDFS.
     */
    @Test
    public void test5() throws Exception {
        mergeFile(new Path("/demo/file"), "E:\\temp\\test1");
    }

    /**
     * Download the whole merged SequenceFile, recreating the directory structure.
     */
    @Test
    public void test6() throws Exception {
        downAllMergeFile(new Path("/demo/file"), "E:\\temp\\test2");
    }


    /**
     * Recursively collects the absolute paths of all regular files under
     * {@code path}. Paths are absolute on disk; {@link #cutPath} later makes
     * them relative to the selected root.
     *
     * @param path  local directory to walk
     * @param paths accumulator the results are appended to
     * @return the same {@code paths} list, for call chaining
     */
    public List<String> getLocalPath(String path, List<String> paths) {
        File file = new File(path);
        File[] files = file.listFiles();
        // Guard against null: listFiles() returns null for non-directories.
        if (files == null) {
            return paths;
        }
        for (File child : files) {
            if (child.isDirectory()) {
                // Descend into subdirectories; only leaf files are recorded.
                getLocalPath(child.toString(), paths);
                continue;
            }
            paths.add(child.toString());
        }
        return paths;
    }

    /**
     * Converts the absolute small-file paths into paths relative to the
     * parent of {@code localPath} (i.e. the selected root directory name is
     * kept as the first component of each relative path).
     *
     * @param localPath       the root directory that was selected
     * @param smallLocalPaths absolute paths of the files under that root
     * @return relative paths, one per input path, in the same order
     */
    public List<String> cutPath(String localPath, List<String> smallLocalPaths) {
        // Length of the last path component ("\\\\" is the regex for one backslash).
        String[] split = localPath.split("\\\\");
        int length = split[split.length - 1].length();
        // Everything before the last component is the common prefix to strip.
        int cutLen = localPath.length() - length;
        List<String> arrayList = new ArrayList<>();
        for (int i = 0; i < smallLocalPaths.size(); i++) {
            arrayList.add(smallLocalPaths.get(i).substring(cutLen));
        }
        return arrayList;
    }


    /**
     * Packs every file under {@code localPath} (recursively) into one HDFS
     * SequenceFile: the relative path is the key, the raw bytes the value.
     *
     * @param path      destination SequenceFile on HDFS
     * @param localPath local root directory to pack
     * @throws Exception on local I/O or HDFS failure
     */
    public void mergeFile(Path path, String localPath) throws Exception {
        SequenceFile.Writer.Option bigFile = SequenceFile.Writer.file(path);
        SequenceFile.Writer.Option keyClass = SequenceFile.Writer.keyClass(Text.class);
        SequenceFile.Writer.Option valueClass = SequenceFile.Writer.valueClass(BytesWritable.class);
        SequenceFile.Writer writer = SequenceFile.createWriter(conf, bigFile, keyClass, valueClass);
        try {
            Text key = new Text();
            List<String> localSmallPaths = getLocalPath(localPath, new ArrayList<String>());
            List<String> relativePaths = cutPath(localPath, localSmallPaths);
            for (int i = 0; i < localSmallPaths.size(); i++) {
                File smallFile = new File(localSmallPaths.get(i));
                String relativePath = relativePaths.get(i);
                // Files.readAllBytes() guarantees a full read and closes the
                // stream; a single InputStream.read() call (as the original
                // used) may legally return fewer bytes than requested.
                byte[] fileContent = Files.readAllBytes(smallFile.toPath());
                String md5Str = DigestUtils.md5Hex(fileContent);
                System.out.println("merge小文件：" + relativePath + ",md5:" + md5Str);
                key.set(relativePath);
                // Relative path as key, file content as value.
                writer.append(key, new BytesWritable(fileContent));
            }
            writer.hflush();
        } finally {
            writer.close();
        }
    }

    /**
     * Restores every entry of a merged SequenceFile under {@code localPath},
     * recreating the directory structure encoded in the keys.
     *
     * @param path      Hadoop SequenceFile path
     * @param localPath local destination root
     * @throws Exception on local I/O or HDFS failure
     */
    public void downAllMergeFile(Path path, String localPath) throws Exception {
        SequenceFile.Reader.Option file = SequenceFile.Reader.file(path);
        SequenceFile.Reader reader = new SequenceFile.Reader(conf, file);
        try {
            Text key = new Text();
            BytesWritable value = new BytesWritable();
            // Iterate over all entries while there is a next record.
            while (reader.next(key, value)) {
                System.out.println(key);
                writeEntryToLocal(key, value, localPath);
            }
        } finally {
            IOUtils.closeStream(reader);
        }
    }

    /**
     * Downloads the single entry whose key equals {@code keyData} to
     * {@code localPath}; all other entries are skipped.
     *
     * @param path      Hadoop SequenceFile path
     * @param localPath local destination root
     * @param keyData   exact key (relative path) of the wanted file
     */
    public void downOneMergeFile(Path path, String localPath, String keyData) throws IOException {
        SequenceFile.Reader.Option file = SequenceFile.Reader.file(path);
        SequenceFile.Reader reader = new SequenceFile.Reader(conf, file);
        try {
            Text key = new Text();
            BytesWritable value = new BytesWritable();
            // Scan all entries; only the matching key is written to disk.
            while (reader.next(key, value)) {
                System.out.println(key);
                if (!key.toString().equals(keyData)) {
                    System.out.println("没有");
                    continue;
                }
                writeEntryToLocal(key, value, localPath);
            }
        } finally {
            IOUtils.closeStream(reader);
        }
    }

    /**
     * Writes one (key, value) SequenceFile entry back to the local disk,
     * creating missing parent directories. The key is the file's relative
     * path, the value its raw content. Shared by {@link #downAllMergeFile}
     * and {@link #downOneMergeFile}, which previously duplicated this code.
     */
    private void writeEntryToLocal(Text key, BytesWritable value, String localPath) throws IOException {
        byte[] bytes = value.copyBytes();
        String md5 = DigestUtils.md5Hex(bytes);
        String content = new String(bytes, StandardCharsets.UTF_8);
        String localRealPath = localPath + "\\\\" + key;
        // Strip the file-name component to get the parent directory
        // ("\\\\" is the regex for a single backslash).
        String[] split = localRealPath.split("\\\\");
        int length = split[split.length - 1].length();
        String parentDir = localRealPath.substring(0, localRealPath.length() - length);
        File dir = new File(parentDir);
        if (!dir.exists()) {
            dir.mkdirs();
        }
        // try-with-resources: the original leaked the stream on write failure.
        try (FileOutputStream fileOutputStream = new FileOutputStream(localRealPath)) {
            fileOutputStream.write(bytes);
        }
        System.out.println("读取到文件：" + key + ",md5:" + md5 + ",content:" + content);
    }
}
