package com.atguigu.hadoop.examples.mapreduce.terasort;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.util.IndexedSortable;
import org.apache.hadoop.util.QuickSort;
import org.apache.hadoop.util.StringUtils;

import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class TeraInputFormat extends FileInputFormat<Text, Text>
{
    static final String PARTITION_FILENAME = "_partition.lst";
    static final int KEY_LENGTH = 10;
    static final int VALUE_LENGTH = 90;
    static final int RECORD_LENGTH = 100;
    private static MRJobConfig lastContext = null;
    private static List<InputSplit> lastResult = null;

    /**
     * Samples the input and writes the reduce partition split points to
     * {@code partFile} (numReduceTasks - 1 serialized {@link Text} keys).
     *
     * <p>Every {@code sampleStep}-th split is read by its own thread until
     * roughly {@code sampleSize / samples} keys have been collected, then the
     * sampled keys are sorted and the evenly spaced boundary keys are written.
     *
     * @param job      job whose configured input and reduce count are sampled
     * @param partFile destination for the partition list (replicated wide,
     *                 short replication 10, so every task can read it cheaply)
     * @throws Throwable any failure raised by a sampler thread, or I/O errors
     */
    public static void writePartitionFile(final JobContext job, Path partFile)
            throws Throwable {
        long t1 = System.currentTimeMillis();
        Configuration conf = job.getConfiguration();
        final TeraInputFormat inFormat = new TeraInputFormat();
        final TextSampler sampler = new TextSampler();
        int partitions = job.getNumReduceTasks();

        long sampleSize =
                conf.getLong(TeraSortConfigKeys.SAMPLE_SIZE.key(), 100000L);

        final List<InputSplit> splits = inFormat.getSplits(job);
        long t2 = System.currentTimeMillis();
        System.out.println("Computing input splits took " + (t2 - t1) + "ms");

        int samples = Math.min(
                conf.getInt(TeraSortConfigKeys.NUM_PARTITIONS.key(), 10),
                splits.size());
        System.out.println("Sampling " + samples + " splits of " + splits.size());
        final long recordsPerSample = sampleSize / samples;
        final int sampleStep = splits.size() / samples;
        Thread[] samplerReader = new Thread[samples];
        SamplerThreadGroup threadGroup =
                new SamplerThreadGroup("Sampler Reader Thread Group");

        // Read a bounded number of keys from every sampleStep-th split, one
        // thread per sampled split; failures are captured by the thread group.
        for (int i = 0; i < samples; i++) {
            final int idx = i;
            samplerReader[i] = new Thread(threadGroup, "Sampler Reader " + idx) {
                public void run() {
                    long records = 0L;
                    try {
                        TaskAttemptContext context = new TaskAttemptContextImpl(
                                job.getConfiguration(), new TaskAttemptID());
                        InputSplit split = splits.get(sampleStep * idx);
                        RecordReader<Text, Text> reader =
                                inFormat.createRecordReader(split, context);
                        reader.initialize(split, context);
                        while (reader.nextKeyValue()) {
                            // Copy: the reader reuses its key instance.
                            sampler.addKey(new Text(reader.getCurrentKey()));
                            records += 1L;
                            if (recordsPerSample <= records) {
                                break;
                            }
                        }
                    } catch (IOException ie) {
                        System.err.println("Got an exception while reading splits " +
                                StringUtils.stringifyException(ie));

                        throw new RuntimeException(ie);
                    } catch (InterruptedException ie) {
                        // BUG FIX: was swallowed; restore the interrupt flag.
                        Thread.currentThread().interrupt();
                    }
                }
            };
            samplerReader[i].start();
        }
        FileSystem outFs = partFile.getFileSystem(conf);
        DataOutputStream writer = outFs.create(partFile, true, 65536, (short) 10, outFs
                .getDefaultBlockSize(partFile));

        // BUG FIX: the original returned here, leaking 'writer', never joining
        // the sampler threads, and never writing any partition keys at all.
        try {
            for (int i = 0; i < samples; i++) {
                samplerReader[i].join();
                if (threadGroup.getThrowable() != null) {
                    throw threadGroup.getThrowable();
                }
            }
            for (Text splitPoint : sampler.createPartitions(partitions)) {
                splitPoint.write(writer);
            }
        } finally {
            writer.close();
        }
        long t3 = System.currentTimeMillis();
        System.out.println("Computing partitions took " + (t3 - t2) + "ms");
    }

    /** Returns a fresh fixed-width record reader; all setup happens in its initialize(). */
    @Override
    public RecordReader<Text, Text> createRecordReader(InputSplit split,
                                                       TaskAttemptContext context)
            throws IOException {
        return new TeraRecordReader();
    }

    /**
     * Computes the input splits for {@code job}, memoizing the most recent
     * result in the static {@code lastContext}/{@code lastResult} pair so the
     * sampler and the framework don't recompute splits for the same context.
     *
     * <p>NOTE(review): the cache is keyed on reference identity and the two
     * static fields are written without synchronization — presumably only one
     * job runs per JVM; confirm before reusing this class concurrently.
     */
    public List<InputSplit> getSplits(JobContext job) throws IOException
    {
        if (job == lastContext) {
            return lastResult;
        }

        long t1 = System.currentTimeMillis();
        lastContext = job;
        lastResult = super.getSplits(job);
        long t2 = System.currentTimeMillis();
        System.out.println("Spent " + (t2 - t1) + "ms computing base-splits.");
        // Optionally let TeraScheduler rearrange the base splits (on by default).
        if (job.getConfiguration().getBoolean(TeraSortConfigKeys.USE_TERA_SCHEDULER.key(), true))
        {
            TeraScheduler scheduler = new TeraScheduler(
                    (FileSplit[])lastResult
                            .toArray(new FileSplit[0]),
                    job.getConfiguration());
            lastResult = scheduler.getNewFileSplits();
            long t3 = System.currentTimeMillis();
            System.out.println("Spent " + (t3 - t2) + "ms computing TeraScheduler splits.");
        }
        return lastResult;
    }

    /**
     * Reads fixed-width 100-byte records (10-byte key + 90-byte value).
     * Record boundaries are absolute file offsets, so a split that begins
     * mid-record first skips forward to the next record boundary.
     */
    static class TeraRecordReader extends RecordReader<Text, Text>
    {
        private FSDataInputStream in;
        // Bytes consumed relative to the split start; seeded with the
        // alignment skip, compared against the split length for termination.
        private long offset;
        private long length;
        private static final int RECORD_LENGTH = 100;
        private byte[] buffer = new byte[RECORD_LENGTH];
        private Text key;
        private Text value;

        public TeraRecordReader()
                throws IOException
        {
        }

        public void initialize(InputSplit split, TaskAttemptContext context)
                throws IOException, InterruptedException
        {
            Path p = ((FileSplit) split).getPath();
            FileSystem fs = p.getFileSystem(context.getConfiguration());
            this.in = fs.open(p);
            long start = ((FileSplit) split).getStart();
            // Advance to the first record boundary at or after 'start'
            // ((RECORD_LENGTH - x) % RECORD_LENGTH is 0 when already aligned).
            this.offset = (RECORD_LENGTH - start % RECORD_LENGTH) % RECORD_LENGTH;
            this.in.seek(start + this.offset);
            this.length = ((FileSplit) split).getLength();
        }

        public void close() throws IOException {
            // Guard: close() may be called before initialize() succeeded.
            if (this.in != null) {
                this.in.close();
            }
        }

        public Text getCurrentKey() {
            return this.key;
        }

        public Text getCurrentValue() {
            return this.value;
        }

        public float getProgress() throws IOException {
            // May slightly exceed 1.0 when the split length is not a
            // multiple of RECORD_LENGTH; callers treat this as approximate.
            return (float) this.offset / (float) this.length;
        }

        public boolean nextKeyValue() throws IOException {
            if (this.offset >= this.length) {
                return false;
            }
            // Fill exactly one record, looping over short reads.
            int read = 0;
            while (read < RECORD_LENGTH) {
                int newRead = this.in.read(this.buffer, read, RECORD_LENGTH - read);
                if (newRead == -1) {
                    if (read == 0) {
                        return false;
                    }
                    // A partial record at EOF means truncated/corrupt input.
                    throw new EOFException("read past eof");
                }
                read += newRead;
            }
            if (this.key == null) {
                this.key = new Text();
            }
            if (this.value == null) {
                this.value = new Text();
            }
            this.key.set(this.buffer, 0, KEY_LENGTH);
            this.value.set(this.buffer, KEY_LENGTH, VALUE_LENGTH);
            this.offset += RECORD_LENGTH;
            return true;
        }
    }

    /**
     * Thread group that remembers an uncaught throwable from its member
     * threads so the coordinating thread can rethrow it after joining.
     * If several threads fail, the most recent failure wins.
     */
    static class SamplerThreadGroup extends ThreadGroup
    {
        private Throwable throwable;

        public SamplerThreadGroup(String name)
        {
            super(name);
        }

        @Override
        public void uncaughtException(Thread failedThread, Throwable cause)
        {
            // Record only; the joiner inspects getThrowable() afterwards.
            this.throwable = cause;
        }

        public Throwable getThrowable() {
            return this.throwable;
        }
    }

    /**
     * Accumulates sampled keys (thread-safely) and turns them into evenly
     * spaced partition split points via an in-place {@link QuickSort}.
     */
    static class TextSampler
            implements IndexedSortable
    {
        // Sampled keys; addKey may be called concurrently by sampler threads,
        // compare/swap/createPartitions run single-threaded afterwards.
        private final ArrayList<Text> records = new ArrayList<>();

        public int compare(int i, int j) {
            return this.records.get(i).compareTo(this.records.get(j));
        }

        public void swap(int i, int j) {
            Text left = this.records.get(i);
            Text right = this.records.get(j);
            this.records.set(j, left);
            this.records.set(i, right);
        }

        public void addKey(Text key) {
            synchronized (this) {
                // Defensive copy: callers reuse their Text instances.
                this.records.add(new Text(key));
            }
        }

        /**
         * Sorts the sample and returns {@code numPartitions - 1} boundary keys
         * spaced evenly through the sorted sample.
         *
         * @throws IllegalArgumentException if more partitions than samples
         */
        Text[] createPartitions(int numPartitions)
        {
            int numRecords = this.records.size();
            System.out.println("Making " + numPartitions + " from " + numRecords + " sampled records");

            if (numPartitions > numRecords) {
                throw new IllegalArgumentException("Requested more partitions than input keys (" + numPartitions + " > " + numRecords + ")");
            }

            new QuickSort().sort(this, 0, this.records.size());
            // BUG FIX: 'numRecords / numPartitions' was *integer* division
            // assigned to a float, truncating the step and bunching all split
            // points toward the low end of the sample. Widen before dividing.
            float stepSize = numRecords / (float) numPartitions;
            Text[] result = new Text[numPartitions - 1];
            for (int i = 1; i < numPartitions; i++) {
                result[i - 1] = this.records.get(Math.round(stepSize * i));
            }
            return result;
        }
    }
}