package org.study.hadoop.md.ch6;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.study.hadoop.md.util.MRDPUtils;

import java.io.*;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.GZIPInputStream;

/**
 * 基本作业链：给定由StackOverflow帖子组成的一个数据集，每个用户按照发帖数是高于还是低于平均发帖数分类。<br>
 *     当生成输出时，从一个独立的数据集中获得每个用户的声望以丰富用户信息。
 */
public class JobChainingDriver extends Configured implements Tool {
    // 平均计算
    public static final String AVERAGE_CALC_GROUP = "AverageCalculation";
    public static final String MULTIPLE_OUTPUTS_ABOVE_NAME = "aboveavg";
    public static final String MULTIPLE_OUTPUTS_BELOW_NAME = "belowavg";

    /**
     * Map-only second-job mapper: reads "userId TAB postCount" lines produced by the
     * counting job, tags each user as above or below the pre-computed average post
     * count, and enriches the output value with the user's reputation loaded from
     * the DistributedCache (gzipped users XML file).
     */
    public static class UserIdBinningMapper extends Mapper<Object, Text, Text, Text> {
        public static final String AVERAGE_POSTS_PER_USER = "avg.posts.per.user";

        /** Stores the computed average in the job configuration for this mapper to read in setup(). */
        public static void setAveragePostsPerUser(Job job, double avg) {
            job.getConfiguration().set(AVERAGE_POSTS_PER_USER, Double.toString(avg));
        }

        /** Reads the average back from the configuration; throws if it was never set. */
        public static double getAveragePostsPerUser(Configuration conf) {
            return Double.parseDouble(conf.get(AVERAGE_POSTS_PER_USER));
        }

        private double average = 0.0;
        private MultipleOutputs<Text, Text> mos = null;
        private Text outKey = new Text();
        private Text outValue = new Text();
        // userId -> reputation, loaded once per task from the cached user file(s)
        private HashMap<String, String> userIdToReputation = new HashMap<String, String>();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            average = getAveragePostsPerUser(context.getConfiguration());
            mos = new MultipleOutputs<Text, Text>(context);

            Path[] files = DistributedCache.getLocalCacheFiles(context.getConfiguration());
            if (files == null || files.length == 0) {
                throw new RuntimeException("User information is not set in DistributedCache");
            }

            for (Path p : files) {
                // try-with-resources: the original leaked the reader for every cache file
                try (BufferedReader rdr = new BufferedReader(new InputStreamReader(
                        new GZIPInputStream(new FileInputStream(new File(p.toString())))))) {
                    String line;
                    while ((line = rdr.readLine()) != null) {
                        Map<String, String> parsed = MRDPUtils.transformXmlToMap(line);
                        String userId = parsed.get("Id");
                        String reputation = parsed.get("Reputation");
                        if (userId != null && reputation != null) {
                            userIdToReputation.put(userId, reputation);
                        }
                    }
                }
            }
        }

        @Override
        protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            // Input line format (output of the counting job): userId TAB postCount
            String[] tokens = value.toString().split("\t");
            String userId = tokens[0];
            int posts = Integer.parseInt(tokens[1]);
            outKey.set(userId);
            // NOTE(review): a user absent from the cache file yields the literal "null" here
            outValue.set(posts + "\t" + userIdToReputation.get(userId));
            if ((double) posts < average) {
                mos.write(MULTIPLE_OUTPUTS_BELOW_NAME, outKey, outValue, MULTIPLE_OUTPUTS_BELOW_NAME + "/part");
            } else {
                // bug fix: was "/psrt", giving the above-average bin an inconsistent file name
                mos.write(MULTIPLE_OUTPUTS_ABOVE_NAME, outKey, outValue, MULTIPLE_OUTPUTS_ABOVE_NAME + "/part");
            }
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            // Must close MultipleOutputs or the named-output files are never flushed/committed
            mos.close();
        }
    }

    /**
     * Sums the per-user post counts emitted by {@link UserIdCountMapper} and increments
     * the "Users" counter once per distinct user, so the driver can compute the average
     * posts-per-user after the job completes.
     */
    public static class UserIdSumReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
        public static final String USERS_COUNTER_NAME = "Users";
        private LongWritable outValue = new LongWritable();

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            // One distinct user per reduce call
            context.getCounter(AVERAGE_CALC_GROUP, USERS_COUNTER_NAME).increment(1);
            // bug fix: accumulate into a long — the values are LongWritable and an int
            // sum silently overflows/truncates for very active users
            long sum = 0;
            for (LongWritable value : values) {
                sum += value.get();
            }
            outValue.set(sum);
            context.write(key, outValue);
        }
    }

    /**
     * First-job mapper: parses each StackOverflow post (one XML row per line) and emits
     * (OwnerUserId, 1) so the reducer can total posts per user. Also bumps the "Records"
     * counter for every post that has an owner, feeding the average calculation.
     */
    public static class UserIdCountMapper extends Mapper<Object, Text, Text, LongWritable> {
        public static final String RECORDS_COUNTER_NAME = "Records";
        private static final LongWritable ONE = new LongWritable(1);
        private Text outKey = new Text();

        @Override
        protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            Map<String, String> attributes = MRDPUtils.transformXmlToMap(value.toString());
            String ownerId = attributes.get("OwnerUserId");
            // Skip rows without an owner (e.g. malformed or community-owned posts)
            if (ownerId == null) {
                return;
            }
            outKey.set(ownerId);
            context.write(outKey, ONE);
            context.getCounter(AVERAGE_CALC_GROUP, RECORDS_COUNTER_NAME).increment(1);
        }
    }

    /**
     * Chains the two jobs: first count posts per user, then — using the counters from
     * job 1 to compute the average — bin each user above/below average, enriching with
     * reputation from the DistributedCache.
     *
     * @return 0 on success of both jobs, non-zero if either job fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Path postInput = new Path("post.txt");
        Path userInput = new Path("user.txt");
        Path outputDirIntermediate = new Path("u_int");
        Path outputDir = new Path("out");
        FileSystem fs = outputDir.getFileSystem(getConf());
        if (fs.exists(outputDir)) {
            fs.delete(outputDir, true);
        }
        // Also clean the intermediate dir, otherwise a rerun fails with
        // "output directory already exists"
        if (fs.exists(outputDirIntermediate)) {
            fs.delete(outputDirIntermediate, true);
        }

        Job countingJob = Job.getInstance(getConf(), "JobChaining-Counting");
        countingJob.setJarByClass(JobChainingDriver.class);

        countingJob.setMapperClass(UserIdCountMapper.class);
        countingJob.setCombinerClass(LongSumReducer.class);
        countingJob.setReducerClass(UserIdSumReducer.class);

        countingJob.setOutputKeyClass(Text.class);
        countingJob.setOutputValueClass(LongWritable.class);

        countingJob.setInputFormatClass(TextInputFormat.class);
        TextInputFormat.addInputPath(countingJob, postInput);

        countingJob.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(countingJob, outputDirIntermediate);

        int code = countingJob.waitForCompletion(true) ? 0 : 1;

        if (code == 0) {
            // Average = total posts / distinct users, both taken from job-1 counters
            double numRecords = countingJob.getCounters().findCounter(AVERAGE_CALC_GROUP, UserIdCountMapper.RECORDS_COUNTER_NAME).getValue();
            double numUsers = countingJob.getCounters().findCounter(AVERAGE_CALC_GROUP, UserIdSumReducer.USERS_COUNTER_NAME).getValue();
            double averagePostsPerUser = numRecords / numUsers;

            Job binningJob = Job.getInstance(getConf(), "JobChaining-Binning");
            binningJob.setJarByClass(JobChainingDriver.class);
            binningJob.setMapperClass(UserIdBinningMapper.class);
            UserIdBinningMapper.setAveragePostsPerUser(binningJob, averagePostsPerUser);
            // Map-only job: MultipleOutputs in the mapper writes the final bins
            binningJob.setNumReduceTasks(0);
            binningJob.setInputFormatClass(TextInputFormat.class);
            TextInputFormat.addInputPath(binningJob, outputDirIntermediate);

            MultipleOutputs.addNamedOutput(binningJob, MULTIPLE_OUTPUTS_BELOW_NAME, TextOutputFormat.class, Text.class, Text.class);
            MultipleOutputs.addNamedOutput(binningJob, MULTIPLE_OUTPUTS_ABOVE_NAME, TextOutputFormat.class, Text.class, Text.class);
            TextOutputFormat.setOutputPath(binningJob, outputDir);

            // Ship every user file to the tasks via the DistributedCache
            FileStatus[] userFiles = userInput.getFileSystem(getConf()).listStatus(userInput);
            for (FileStatus status : userFiles) {
                DistributedCache.addCacheFile(status.getPath().toUri(), binningJob.getConfiguration());
            }
            return binningJob.waitForCompletion(true) ? 0 : 1;
        }
        // bug fix: the original returned 0 (success) even when the counting job failed
        return code;
    }

    /** CLI entry point: delegates to ToolRunner so generic -D/-conf options are honored. */
    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new JobChainingDriver(), args));
    }
}
