package cn.lyjuan.first.hadoop.demo.ch04;

import cn.lyjuan.base.util.RandomUtils;
import cn.lyjuan.first.hadoop.demo.enums.ChNameEnum;
import cn.lyjuan.first.hadoop.util.FileUtil;
import cn.lyjuan.first.hadoop.util.HDFSUtil;
import org.apache.avro.file.SeekableFileInput;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

/**
 * 统计单词出现的次数，并按词频排序
 */
public class Ch04S06WordCount2 {

    /**
     * Comparator that orders {@link IntWritable} keys in descending order,
     * so the sort job lists the most frequent words first.
     * IntWritable's default ordering is ascending; swapping the operands of
     * the superclass comparison reverses it.
     */
    public static class CountDecreasingComparator extends IntWritable.Comparator {
        @Override
        public int compare(WritableComparable a, WritableComparable b) {
            // Swapped operands => descending order.
            return super.compare(b, a);
        }

        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            // Raw-bytes variant used during the shuffle; same operand swap.
            return super.compare(b2, s2, l2, b1, s1, l1);
        }
    }

    /**
     * 解析出数据
     */
    /**
     * Tokenizes each input line on whitespace and emits (word, 1) for every
     * token; key type of the counting job's map output.
     */
    public static class CountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable DEF_COUNT = new IntWritable(1);
        // Reused output key: context.write serializes immediately, so reusing
        // one Text avoids allocating a new object per token (standard
        // WordCount idiom).
        private final Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Default StringTokenizer delimiters are whitespace characters.
            StringTokenizer lineTokenizer = new StringTokenizer(value.toString());
            while (lineTokenizer.hasMoreTokens()) {
                word.set(lineTokenizer.nextToken());
                context.write(word, DEF_COUNT);
            }
        }
    }

    /**
     * Sums the per-word counts emitted by {@link CountMapper} and writes the
     * total as (word, total).
     */
    public static class CountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        // Per-instance (was static): a static mutable IntWritable would be
        // shared by every reducer instance in the same JVM, which is unsafe
        // under JVM reuse. Reusing one instance field is the standard idiom.
        private final IntWritable counter = new IntWritable(0);

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable v : values) {
                sum += v.get();
            }
            counter.set(sum);
            context.write(key, counter);
        }
    }

    /** HDFS paths of the generated per-line input files; filled by {@link #loadInputFiles()}. */
    private static List<Path> inFiles = new ArrayList<>();

    /** Intermediate output of the counting job; input of the sorting job. */
    private static Path TMP_OUT_PATH = FileUtil.remoteURIPath(ChNameEnum.CH04, Ch04S06WordCount2.class, "tmp");

    /** Final, frequency-sorted result directory. */
    private static Path OUT_PATH = FileUtil.remoteURIPath(ChNameEnum.CH04, Ch04S06WordCount2.class, "result");

    /**
     * Generates the job input: deletes any previous remote directory, then
     * writes each line of the local source file to its own HDFS file and
     * records the created paths in {@link #inFiles}.
     *
     * @throws Exception if local file reading or any HDFS operation fails
     */
    public static void loadInputFiles() throws Exception {
        String remotePrefix = FileUtil.remotePath(ChNameEnum.CH04, Ch04S06WordCount2.class);

        // Remove the output of any previous run.
        HDFSUtil.del(remotePrefix);
        // try-with-resources: the original never closed the reader (leak on
        // both the normal path and on exceptions).
        // NOTE(review): FileReader uses the platform default charset — confirm
        // it matches the encoding of source.txt.
        try (BufferedReader reader = new BufferedReader(new FileReader(
                FileUtil.docPath(ChNameEnum.CH04, Ch04S01WordCount.class, "source.txt")))) {
            String line;
            while (null != (line = reader.readLine())) {
                // One remote file per source line, named with a random UUID.
                Path remotePath = FileUtil.remoteURIPath(ChNameEnum.CH04, Ch04S06WordCount2.class, RandomUtils.uuid());
                inFiles.add(remotePath);
                HDFSUtil.writeRemote(remotePath, line);
                System.out.println("write remote file: " + remotePath.toString());
            }
        }
    }

    /**
     * Runs two chained jobs: (1) count word occurrences and write a
     * SequenceFile to {@link #TMP_OUT_PATH}; (2) invert (word, count) to
     * (count, word), sort the counts in descending order into
     * {@link #OUT_PATH}, then print the result files.
     *
     * @param args unused
     * @throws Exception if input generation or either job fails
     */
    public static void main(String[] args) throws Exception {
        // Generate the per-line input files on HDFS.
        loadInputFiles();

//        HDFSUtil.conf().set("mapred.job.tracker", "192.168.1.200:9001");

        // 1. Counting job.
        Job countJob = new Job(HDFSUtil.conf(), "WordCount2-Counter");
        countJob.setUser(HDFSUtil.USER);
        // Fix: the original passed Ch04S04Sort.class (copy-paste error);
        // the jar lookup should use this driver class, as the sort job does.
        countJob.setJarByClass(Ch04S06WordCount2.class);
        countJob.setMapperClass(CountMapper.class);

//        job.setCombinerClass(AngleReduce.class);
        countJob.setReducerClass(CountReducer.class);
        countJob.setOutputKeyClass(Text.class);
        countJob.setOutputValueClass(IntWritable.class);
        // Use two reduce partitions for the counting phase.
        countJob.setNumReduceTasks(2);
        // The counting job's output format must match the sorting job's
        // input format (SequenceFile).
        countJob.setOutputFormatClass(SequenceFileOutputFormat.class);
        for (Path i : inFiles)
            FileInputFormat.addInputPath(countJob, i);
        FileOutputFormat.setOutputPath(countJob, TMP_OUT_PATH);
        boolean isOk = countJob.waitForCompletion(true);
        System.out.println("result tmp ==> " + isOk);
        // Fix: abort instead of sorting a missing/partial intermediate output.
        if (!isOk) return;

        // 2. Sorting job.
        Job sortJob = new Job(HDFSUtil.conf(), "WordCount2-Sorter");
        sortJob.setJarByClass(Ch04S06WordCount2.class);
        FileInputFormat.addInputPath(sortJob, TMP_OUT_PATH);
        sortJob.setInputFormatClass(SequenceFileInputFormat.class);
        // Hadoop-provided mapper that swaps key and value:
        // (word, count) -> (count, word).
        sortJob.setMapperClass(InverseMapper.class);
        // A single reducer yields one globally sorted result file.
        sortJob.setNumReduceTasks(1);
        FileOutputFormat.setOutputPath(sortJob, OUT_PATH);
        sortJob.setOutputKeyClass(IntWritable.class);
        // IntWritable sorts ascending by default; this comparator reverses it
        // so the most frequent words come first.
        sortJob.setSortComparatorClass(CountDecreasingComparator.class);
        sortJob.setOutputValueClass(Text.class);
        sortJob.setOutputFormatClass(TextOutputFormat.class);
        isOk = sortJob.waitForCompletion(true);

        if (!isOk) return;
        // On success, print every result file under the output directory.
        FileStatus[] results = HDFSUtil.ls(OUT_PATH);
        for (FileStatus f : results) {
            System.out.println("====== " + f.getPath().getName() + " ======");
            System.out.println(new String(HDFSUtil.readRemote(f.getPath())));
            System.out.println();
            System.out.println();
        }
    }
}
