package com.atguigu.hadoop.examples.mapreduce;


import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader;
import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;
import java.util.StringTokenizer;

public class MultiFileWordCount extends Configured
        implements Tool
{
    /** Prints the expected command-line arguments for this tool. */
    private void printUsage() {
        System.out.println("Usage : multifilewc <input_dir> <output>");
    }

    /**
     * Configures and submits the word-count job.
     *
     * @param args {@code args[0]} = input directory, {@code args[1]} = output path
     * @return 0 on success, 1 on job failure, 2 on bad usage
     */
    @Override
    public int run(String[] args) throws Exception {
        if (args.length < 2) {
            printUsage();
            return 2;
        }

        Job job = Job.getInstance(getConf());
        job.setJobName("MultiFileWordCount");
        job.setJarByClass(MultiFileWordCount.class);

        // Pack many small files into combined splits; the custom format
        // produces (WordOffset, line) records for the mapper.
        job.setInputFormatClass(MyInputFormat.class);
        job.setMapperClass(MapClass.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPaths(job, args[0]);
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /** Command-line entry point; delegates argument parsing to {@link ToolRunner}. */
    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new MultiFileWordCount(), args));
    }

    public static class MapClass extends Mapper<MultiFileWordCount.WordOffset, Text, Text, IntWritable>
    {
        private static final IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(MultiFileWordCount.WordOffset key, Text value, Mapper<MultiFileWordCount.WordOffset, Text, Text, IntWritable>.Context context)
                throws IOException, InterruptedException
        {
            String line = value.toString();
            StringTokenizer itr = new StringTokenizer(line);
            while (itr.hasMoreTokens()) {
                this.word.set(itr.nextToken());
                context.write(this.word, one);
            }
        }
    }

    public static class CombineFileLineRecordReader extends RecordReader<MultiFileWordCount.WordOffset, Text>
    {
        private long startOffset;
        private long end;
        private long pos;
        private FileSystem fs;
        private Path path;
        private MultiFileWordCount.WordOffset key;
        private Text value;
        private FSDataInputStream fileIn;
        private LineReader reader;

        public CombineFileLineRecordReader(CombineFileSplit split, TaskAttemptContext context, Integer index)
                throws IOException
        {
            this.path = split.getPath(index.intValue());
            this.fs = this.path.getFileSystem(context.getConfiguration());
            this.startOffset = split.getOffset(index.intValue());
            this.end = (this.startOffset + split.getLength(index.intValue()));
            boolean skipFirstLine = false;

            this.fileIn = this.fs.open(this.path);
            if (this.startOffset != 0L) {
                skipFirstLine = true;
                this.startOffset -= 1L;
                this.fileIn.seek(this.startOffset);
            }
            this.reader = new LineReader(this.fileIn);
            if (skipFirstLine) {
                this.startOffset += this.reader.readLine(new Text(), 0,
                        (int)Math.min(2147483647L, this.end - this.startOffset));
            }

            this.pos = this.startOffset;
        }

        public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
        }

        public void close() throws IOException {
        }

        public float getProgress() throws IOException {
            if (this.startOffset == this.end) {
                return 0.0F;
            }
            return Math.min(1.0F, (float)(this.pos - this.startOffset) / (float)(this.end - this.startOffset));
        }

        public boolean nextKeyValue() throws IOException
        {
            if (this.key == null) {
                this.key = new MultiFileWordCount.WordOffset();
              //  MultiFileWordCount.WordOffset.access$002(this.key, this.path.getName());
            }
          //  MultiFileWordCount.WordOffset.access$102(this.key, this.pos);
            if (this.value == null) {
                this.value = new Text();
            }
            int newSize = 0;
            if (this.pos < this.end) {
                newSize = this.reader.readLine(this.value);
                this.pos += newSize;
            }
            if (newSize == 0) {
                this.key = null;
                this.value = null;
                return false;
            }
            return true;
        }

        public MultiFileWordCount.WordOffset getCurrentKey()
                throws IOException, InterruptedException
        {
            return this.key;
        }

        public Text getCurrentValue() throws IOException, InterruptedException {
            return this.value;
        }
    }

    public static class MyInputFormat extends CombineFileInputFormat<MultiFileWordCount.WordOffset, Text>
    {
        public RecordReader<MultiFileWordCount.WordOffset, Text> createRecordReader(InputSplit split, TaskAttemptContext context)
                throws IOException
        {
            return new CombineFileRecordReader((CombineFileSplit)split, context, MultiFileWordCount.CombineFileLineRecordReader.class);
        }
    }

    public static class WordOffset
            implements WritableComparable
    {
        private long offset;
        private String fileName;

        public void readFields(DataInput in)
                throws IOException
        {
            this.offset = in.readLong();
            this.fileName = Text.readString(in);
        }

        public void write(DataOutput out) throws IOException {
            out.writeLong(this.offset);
            Text.writeString(out, this.fileName);
        }

        public int compareTo(Object o) {
            WordOffset that = (WordOffset)o;

            int f = this.fileName.compareTo(that.fileName);
            if (f == 0) {
                return (int)Math.signum(this.offset - that.offset);
            }
            return f;
        }

        public boolean equals(Object obj) {
            if ((obj instanceof WordOffset))
                return compareTo(obj) == 0;
            return false;
        }

        @Override
        public int hashCode() {
            return Objects.hash(offset, fileName);
        }
    }
}