package org.example.file.avro;

import cn.hutool.core.util.RandomUtil;
import cn.newrank.api.xhs.custom.project.system.biz.smxk.topic.pojo.avro.SmxkXhsOpusDailyDataAvro;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.function.Function;

public class AvroFileDistinct {

    /**
     * Deduplicates an Avro file in place. Records are first hash-partitioned
     * into small temporary files (so records with equal keys land in the same
     * partition), then each partition is deduplicated and written back to the
     * target file.
     *
     * @param targetFile     path of the Avro file to deduplicate
     * @param splitSize      number of hash partitions (temporary small files)
     * @param t              concrete record type stored in the file
     * @param handleCode     extracts the hash code used for partitioning
     * @param handleDistinct extracts the dedup key; must be derived from the
     *                       same field(s) as {@code handleCode}
     * @throws Exception if partitioning or deduplication fails
     */
    public static <T extends org.apache.avro.specific.SpecificRecordBase> void distinctFile(String targetFile, int splitSize, Class<T> t, Function<T, Integer> handleCode, Function<T, String> handleDistinct) throws Exception {
        File[] partitions = splitFileV2(targetFile, splitSize, t, handleCode);
        distinct(partitions, targetFile, splitSize, t, handleDistinct);
    }

    /**
     * Hash-partitions the records of {@code targetFile} into {@code splitSize}
     * small Avro files under a temporary sibling folder, keeping one open
     * writer per partition. Records with equal hash codes end up in the same
     * small file.
     *
     * @param targetFile path of the Avro file to split
     * @param splitSize  number of hash partitions to produce
     * @param tClass     concrete record type stored in the file
     * @param handleCode extracts the hash code used for partitioning
     * @return the partition files, indexed by {@code hash % splitSize}
     * @throws Exception if reading or writing fails (original cause attached)
     */
    private static <T extends org.apache.avro.specific.SpecificRecordBase> File[] splitFile(String targetFile, int splitSize, Class<T> tClass, Function<T, Integer> handleCode) throws Exception {
        // Target file and the temporary folder holding the partition files.
        File file = new File(targetFile);
        File[] littleFiles = new File[splitSize];
        File tempFolder = new File(file.getParent(), "test");
        if (!tempFolder.exists()) {
            tempFolder.mkdirs();
        }
        // getDeclaredConstructor() replaces the deprecated Class.newInstance(),
        // which swallowed checked constructor exceptions.
        T prototype = tClass.getDeclaredConstructor().newInstance();
        List<DataFileWriter<T>> writers = new ArrayList<>(splitSize);
        try {
            for (int i = 0; i < splitSize; i++) {
                littleFiles[i] = new File(tempFolder, i + ".avro");
                if (littleFiles[i].exists()) {
                    littleFiles[i].delete();
                }
                DataFileWriter<T> writer = new DataFileWriter<>(new SpecificDatumWriter<>(tClass));
                // Register before create() so the writer is closed in the
                // finally block even if create() throws mid-setup.
                writers.add(writer);
                writer.create(prototype.getSchema(), littleFiles[i]);
            }
            DatumReader<T> datumReader = new SpecificDatumReader<>(tClass);
            try (DataFileReader<T> dataFileReader = new DataFileReader<>(file, datumReader)) {
                while (dataFileReader.hasNext()) {
                    T data = dataFileReader.next();
                    // Same hash -> same partition, so duplicates are guaranteed
                    // to meet within one small file.
                    int index = Math.abs(handleCode.apply(data) % splitSize);
                    writers.get(index).append(data);
                }
            }
        } catch (Exception e) {
            // Preserve the cause instead of flattening it to a message string.
            throw new Exception("文件切割出现异常，原因:" + e.getMessage(), e);
        } finally {
            // close() flushes, so no per-record flush is needed.
            for (DataFileWriter<T> writer : writers) {
                writer.close();
            }
        }
        return littleFiles;
    }

    /**
     * Hash-partitions the records of {@code targetFile} into {@code splitSize}
     * small Avro files, opening at most one writer at a time (lower memory /
     * fewer open handles than {@link #splitFile}, at the cost of one
     * open-close cycle per record).
     *
     * <p>Bug fix: the original reused a single {@link DataFileWriter} and
     * called {@code create()}/{@code appendTo()} repeatedly without closing
     * it. Avro asserts the writer is not already open on both calls, so the
     * second record threw. A fresh writer is now opened and closed per record.
     *
     * @param targetFile path of the Avro file to split
     * @param splitSize  number of hash partitions to produce
     * @param tClass     concrete record type stored in the file
     * @param handleCode extracts the hash code used for partitioning
     * @return the partition files, indexed by {@code hash % splitSize}
     * @throws Exception if reading or writing fails (original cause attached)
     */
    private static <T extends org.apache.avro.specific.SpecificRecordBase> File[] splitFileV2(String targetFile, int splitSize, Class<T> tClass, Function<T, Integer> handleCode) throws Exception {
        // Target file and the temporary folder holding the partition files.
        File file = new File(targetFile);
        File[] littleFiles = new File[splitSize];
        File tempFolder = new File(file.getParent(), "test");
        if (!tempFolder.exists()) {
            tempFolder.mkdirs();
        }
        for (int i = 0; i < splitSize; i++) {
            littleFiles[i] = new File(tempFolder, i + ".avro");
            if (littleFiles[i].exists()) {
                littleFiles[i].delete();
            }
        }
        DatumReader<T> datumReader = new SpecificDatumReader<>(tClass);
        try (DataFileReader<T> dataFileReader = new DataFileReader<>(file, datumReader)) {
            while (dataFileReader.hasNext()) {
                T data = dataFileReader.next();
                // Same hash -> same partition, so duplicates are guaranteed to
                // meet within one small file.
                int index = Math.abs(handleCode.apply(data) % splitSize);
                // A DataFileWriter may only be opened once; open a fresh one
                // per record and let try-with-resources close (and flush) it.
                try (DataFileWriter<T> writer = new DataFileWriter<>(new SpecificDatumWriter<>(tClass))) {
                    if (littleFiles[index].length() > 0L) {
                        writer.appendTo(littleFiles[index]);
                    } else {
                        writer.create(data.getSchema(), littleFiles[index]);
                    }
                    writer.append(data);
                }
            }
        } catch (Exception e) {
            // Preserve the cause instead of flattening it to a message string.
            throw new Exception("文件切割出现异常，原因:" + e.getMessage(), e);
        }
        return littleFiles;
    }


    /**
     * Deduplicates each partition file and writes the surviving records into
     * {@code distinctFilePath} (the original target file, which is recreated).
     * Because partitioning guarantees equal keys share a partition, the seen
     * set can be cleared between partitions to bound memory.
     *
     * @param littleFiles      hash-partitioned temporary files
     * @param distinctFilePath output path; any existing file is replaced
     * @param splitSize        number of partitions
     * @param tClass           concrete record type stored in the files
     * @param handleDistinct   extracts the dedup key for a record
     * @throws Exception if reading or writing fails (original cause attached)
     */
    private static <T extends org.apache.avro.specific.SpecificRecordBase> void distinct(File[] littleFiles, String distinctFilePath, int splitSize, Class<T> tClass, Function<T, String> handleDistinct) throws Exception {
        File distinctedFile = new File(distinctFilePath);
        // getDeclaredConstructor() replaces the deprecated Class.newInstance().
        T prototype = tClass.getDeclaredConstructor().newInstance();
        try (DataFileWriter<T> writer = new DataFileWriter<>(new SpecificDatumWriter<>(tClass))) {
            if (distinctedFile.exists()) {
                distinctedFile.delete();
            }
            writer.create(prototype.getSchema(), distinctedFile);
            Set<String> seen = new HashSet<>();
            for (int i = 0; i < splitSize; i++) {
                if (!littleFiles[i].exists()) {
                    continue;
                }
                System.out.println("开始对小文件：" + littleFiles[i].getName() + "去重");
                DatumReader<T> datumReader = new SpecificDatumReader<>(tClass);
                try (DataFileReader<T> dataFileReader = new DataFileReader<>(littleFiles[i], datumReader)) {
                    while (dataFileReader.hasNext()) {
                        T data = dataFileReader.next();
                        // Set.add returns false for an already-seen key.
                        if (seen.add(handleDistinct.apply(data))) {
                            writer.append(data);
                        }
                    }
                }
                writer.flush();
                // Duplicates never cross partitions, so the set can be reset
                // between partitions to keep memory bounded.
                seen.clear();
            }
        } catch (Exception e) {
            // Preserve the cause; the original also threw from finally while
            // closing the writer, which masked the real failure —
            // try-with-resources handles close (with suppression) instead.
            throw new Exception("小文件去重失败，失败原因：" + e.getMessage(), e);
        } finally {
            // Delete the temporary partition files whether or not we succeeded.
            for (int i = 0; i < splitSize; i++) {
                if (littleFiles[i].exists()) {
                    littleFiles[i].delete();
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Local test entry point: run the dedup pass over the hard-coded file.
        // To (re)generate the test fixture first, comment distinct() back out
        // and run copy() instead — the commented call is kept as a toggle.
        distinct();
        // copy();
    }

    /**
     * Test driver: deduplicates the hard-coded sample Avro file by note id
     * and prints the elapsed wall-clock time in milliseconds.
     */
    public static void distinct() throws Exception {
        long startMillis = System.currentTimeMillis();
        distinctFile("D:\\doc\\xhs-opus-daily-data-copy.avro",
                5,
                SmxkXhsOpusDailyDataAvro.class,
                record -> record.getNoteId().hashCode(),
                record -> String.valueOf(record.getNoteId()));
        long elapsedMillis = System.currentTimeMillis() - startMillis;
        System.out.println("文件去重耗时: " + elapsedMillis + "ms");
    }

    /**
     * Test driver: expands the source Avro file into the copy that
     * {@code distinct()} later deduplicates. Paths are hard-coded for local
     * testing.
     */
    public static void copy() throws IOException {
        System.out.println("正在进行文件拷贝...");
        // Single-escaped backslashes, consistent with the path in distinct();
        // the previous "D:\\\\doc" literals yielded a doubled separator at
        // runtime (harmless only because java.io.File normalizes it).
        File source = new File("D:\\doc\\xhs-opus-daily-data.avro");
        File target = new File("D:\\doc\\xhs-opus-daily-data-copy.avro");
        copy1GB(source, target);
        System.out.println("文件拷贝成功");
    }

    /**
     * Builds a roughly 1 GB test fixture: copies up to 426418 records from
     * {@code source} into {@code target}, writing each record 5 times so the
     * output contains guaranteed duplicates for the dedup test.
     *
     * @param source existing Avro file to read from
     * @param target output file (created/overwritten)
     * @throws RuntimeException wrapping any I/O or reflection failure
     */
    public static void copy1GB(File source, File target) {
        DatumReader<SmxkXhsOpusDailyDataAvro> datumReader = new SpecificDatumReader<>(SmxkXhsOpusDailyDataAvro.class);
        SpecificDatumWriter<SmxkXhsOpusDailyDataAvro> datumWriter = new SpecificDatumWriter<>(SmxkXhsOpusDailyDataAvro.class);
        try (DataFileReader<SmxkXhsOpusDailyDataAvro> reader = new DataFileReader<>(source, datumReader);
             DataFileWriter<SmxkXhsOpusDailyDataAvro> writer = new DataFileWriter<>(datumWriter)) {
            // getDeclaredConstructor() replaces the deprecated Class.newInstance().
            writer.create(SmxkXhsOpusDailyDataAvro.class.getDeclaredConstructor().newInstance().getSchema(), target);
            int remaining = 426418; // record budget sized to yield ~1 GB
            while (remaining > 0 && reader.hasNext()) {
                SmxkXhsOpusDailyDataAvro data = reader.next();
                // Write 5 identical copies (the original's 5 pasted appends).
                for (int copies = 0; copies < 5; copies++) {
                    writer.append(data);
                }
                remaining--;
            }
            writer.flush();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds a roughly 10 GB test fixture: copies up to 449388 records from
     * {@code source}, writing each record once unchanged (a guaranteed
     * duplicate key) plus 25 copies with a fresh random 24-char note id —
     * 26 output records per input record, as in the original pasted code.
     *
     * @param source existing Avro file to read from
     * @param target output file (created/overwritten)
     * @throws IOException      declared for interface compatibility
     * @throws RuntimeException wrapping any I/O or reflection failure
     */
    public static void copy10GB(File source, File target) throws IOException {
        DatumReader<SmxkXhsOpusDailyDataAvro> datumReader = new SpecificDatumReader<>(SmxkXhsOpusDailyDataAvro.class);
        SpecificDatumWriter<SmxkXhsOpusDailyDataAvro> datumWriter = new SpecificDatumWriter<>(SmxkXhsOpusDailyDataAvro.class);
        try (DataFileReader<SmxkXhsOpusDailyDataAvro> reader = new DataFileReader<>(source, datumReader);
             DataFileWriter<SmxkXhsOpusDailyDataAvro> writer = new DataFileWriter<>(datumWriter)) {
            // getDeclaredConstructor() replaces the deprecated Class.newInstance().
            writer.create(SmxkXhsOpusDailyDataAvro.class.getDeclaredConstructor().newInstance().getSchema(), target);
            int remaining = 449388; // record budget sized to yield ~10 GB
            while (remaining > 0 && reader.hasNext()) {
                SmxkXhsOpusDailyDataAvro data = reader.next();
                // Original record keeps its note id (a duplicate for dedup).
                writer.append(data);
                // 25 more copies, each with a fresh random note id (replaces
                // the 25 hand-pasted setNoteId/append pairs).
                for (int copies = 0; copies < 25; copies++) {
                    data.setNoteId(RandomUtil.randomString(RandomUtil.BASE_CHAR_NUMBER, 24));
                    writer.append(data);
                }
                remaining--;
            }
            writer.flush();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
