package cn.lyjuan.first.hadoop.demo.ch04;

import cn.lyjuan.base.util.DateUtils;
import cn.lyjuan.base.util.RandomUtils;
import cn.lyjuan.first.hadoop.demo.enums.ChNameEnum;
import cn.lyjuan.first.hadoop.util.FileUtil;
import cn.lyjuan.first.hadoop.util.HDFSUtil;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;

/**
 * 数据去重
 */
public class Ch04S03Deduplicate {

    /**
     * 数据处理任务:<br/>
     * 1. 直接输出数据
     */
    /**
     * Identity mapper for deduplication: emits each whole input line as the map
     * output key with a {@link NullWritable} value, so that identical lines are
     * grouped together by the shuffle phase.
     */
    public static class DedupMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // The byte-offset key is irrelevant here; the line itself is the dedup key.
            NullWritable nothing = NullWritable.get();
            context.write(value, nothing);
        }
    }

    /**
     * 数据合并、排序
     */
    /**
     * Deduplicating reducer: each distinct key arrives exactly once per group,
     * so writing the key once (and ignoring the grouped values) removes all
     * duplicates. Output keys are emitted in the framework's sorted key order.
     */
    public static class DedupReduce extends Reducer<Text, NullWritable, Text, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {
            // Collapse the whole group to a single occurrence of the key.
            NullWritable nothing = NullWritable.get();
            context.write(key, nothing);
        }
    }

    /** HDFS output directory for the job's deduplicated result files. */
    public static final Path OUT_PATH = FileUtil.remoteURIPath(ChNameEnum.CH04, Ch04S03Deduplicate.class, "result");

    // Input file paths populated by loadInputFiles() and consumed by main()
    // when registering the job's input paths. Mutable on purpose.
    public static final List<Path> inFiles = new ArrayList<>();

    /**
     * 生成HDFS源数据文件
     */
    /**
     * Generates the random HDFS source files used as job input and records
     * their paths in {@link #inFiles}.
     * <p>
     * Each file holds {@code row} lines of the form {@code "<date> <letter>"};
     * dates are drawn from a narrow epoch-millis window (prefix "1624"), so
     * duplicate lines across files are likely — which is the point of the demo.
     */
    public static void loadInputFiles() {
        // Make repeated calls idempotent: previously recorded paths would
        // otherwise accumulate and be re-registered as job inputs.
        inFiles.clear();
        // NOTE(review): assuming RandomUtils.randomInt(n) may return 0 — TODO confirm its range.
        int fileCount = RandomUtils.randomInt(2);
        Path remoteDir = new Path(FileUtil.remotePath(ChNameEnum.CH04, Ch04S03Deduplicate.class));
        HDFSUtil.del(remoteDir);// wipe stale data from a previous run
        for (int i = 0; i < fileCount; i++) {
            Path path = FileUtil.remoteURIPath(ChNameEnum.CH04, Ch04S03Deduplicate.class, String.valueOf(i));
            inFiles.add(path);
            int row = RandomUtils.randomInt(2);
            StringBuilder sb = new StringBuilder();
            for (int j = 0; j < row; j++) {
                LocalDateTime date = DateUtils.long2Time(Long.parseLong("1624" + RandomUtils.randomIntStr(9)));
                sb.append("\n").append(DateUtils.format(date, DateUtils.FMT_DATE)).append(" ").append((char) RandomUtils.randomInt(97, 109));
            }
            // BUG FIX: deleteCharAt(0) threw StringIndexOutOfBoundsException when
            // row == 0 left the builder empty; only strip the leading "\n" if present.
            if (sb.length() > 0)
                sb.deleteCharAt(0);
            HDFSUtil.writeRemote(path, sb.toString());
            System.out.println("write remote file: " + path);
        }
    }

    /**
     * Driver: generates random input files on HDFS, configures and runs the
     * deduplication MapReduce job, then prints every result file on success.
     *
     * @param args unused
     * @throws Exception if job submission, execution, or HDFS access fails
     */
    public static void main(String[] args) throws Exception {
        // Generate the source files (also clears any previous run's data).
        loadInputFiles();

        // Job.getInstance(...) replaces the deprecated Job(Configuration, String) constructor.
        Job job = Job.getInstance(HDFSUtil.conf(), "deduplicate");
        job.setUser(HDFSUtil.USER);
        job.setJarByClass(Ch04S03Deduplicate.class);
        job.setMapperClass(DedupMapper.class);
        job.setReducerClass(DedupReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        for (Path i : inFiles)
            FileInputFormat.addInputPath(job, i);
        FileOutputFormat.setOutputPath(job, OUT_PATH);

        boolean isOk = job.waitForCompletion(true);
        System.out.println("result ==> " + isOk);
        if (!isOk) return;

        // On success the output directory contains result files; on failure nothing is written.
        FileStatus[] results = HDFSUtil.ls(OUT_PATH);
        for (FileStatus f : results) {
            System.out.println("====== " + f.getPath().getName() + " ======");
            // BUG FIX (portability): decode explicitly as UTF-8 instead of relying
            // on the JVM's platform default charset.
            System.out.println(new String(HDFSUtil.readRemote(f.getPath()), StandardCharsets.UTF_8));
            System.out.println();
            System.out.println();
        }
    }
}
