import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.dfs.DistributedFileSystem;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;


/**
 * Hadoop MapReduce job (old {@code org.apache.hadoop.mapred} API) that counts
 * word occurrences in a text corpus and emits only words whose total count
 * exceeds a threshold. The threshold is {@code totalWords * args[2]}, computed
 * in {@link #main} and handed to reducer tasks through a small file on the
 * DFS ({@link #maxCountPath}), since driver and tasks share no memory.
 */
public class WordCount {

    // DFS handle used by reducer tasks to read the threshold file.
    static final DistributedFileSystem dfs = getaDFS();
    // DFS location where main() writes the max-count threshold as one int.
    static final Path maxCountPath = new Path ("xxx/mcp");

    /**
     * Opens a handle to the distributed file system.
     *
     * @return a connected {@link DistributedFileSystem}
     * @throws RuntimeException (with the original {@link IOException} as its
     *         cause) if the DFS cannot be reached
     */
    public static DistributedFileSystem getaDFS () {
        try {
            return ProjectMain.getDFS(new JobConf (WordCount.class));
        } catch (IOException e) {
            // Keep the original exception as the cause instead of flattening
            // it into message text and losing the stack trace.
            throw new RuntimeException ("I/O: " + e, e);
        }
    }

    /** Mapper: tokenizes each input line and emits a (word, 1) pair per token. */
    public static class WCMap extends MapReduceBase implements Mapper {
        // Reusable output objects to avoid per-token allocation. Instance
        // fields (not static, as before) so two mapper instances in the same
        // JVM cannot clobber each other's in-flight values.
        private final IntWritable one = new IntWritable (1);
        private final Text word = new Text();

        public void map(WritableComparable key, Writable value,
                OutputCollector output, Reporter reporter) throws IOException {
            // Split on whitespace and periods; ProjectMain.scrub() normalizes
            // each raw token (semantics defined elsewhere in the project).
            StringTokenizer itr = new StringTokenizer(value.toString(), " \t\n\r\f.");
            while (itr.hasMoreTokens()) {
                String tmp = ProjectMain.scrub(itr.nextToken());
                // scrub() may reduce a token to nothing; skip those.
                if (!tmp.equals("")) {
                    word.set(tmp);
                    output.collect(word, one);
                }
            }
        }
    }

    /**
     * Reducer: sums the per-word counts and emits the word only when its
     * total exceeds the threshold that main() wrote to {@link #maxCountPath}.
     */
    public static class WCReduce extends MapReduceBase implements Reducer {

        // Read once per task JVM; main() writes this file before runJob().
        private static final int maxCount = getIntFromFile(maxCountPath, dfs);

        @SuppressWarnings("unchecked")
        public void reduce(WritableComparable key, Iterator values,
                OutputCollector output, Reporter reporter) throws IOException {
            int sum = 0;
            while (values.hasNext()) {
                sum += ((IntWritable) values.next()).get();
            }
            // Threshold filter belongs here, NOT in the combiner: the
            // combiner only sees partial sums and would drop words early.
            if (sum > maxCount) {
                output.collect(key, new IntWritable (sum));
            }
        }
    }

    /**
     * Combiner: sums partial counts. Deliberately applies no threshold
     * filtering (see note in {@link WCReduce#reduce}).
     */
    public static class WCCombine extends MapReduceBase implements Reducer {
        @SuppressWarnings("unchecked")
        public void reduce (WritableComparable key, Iterator values,
                OutputCollector output, Reporter reporter) throws IOException {
            int sum = 0;
            while (values.hasNext()) {
                sum += ((IntWritable) values.next()).get();
            }
            output.collect(key, new IntWritable (sum));
        }
    }

    /**
     * Job driver.
     * <p>
     * Usage: {@code WordCount <localInput> <localOutput> <fraction>}, where
     * {@code fraction} scales the corpus word total into the emit threshold.
     *
     * @throws IOException on any DFS or job-submission failure
     */
    public static void main (String[] args) throws IOException {
        // Fail fast with a usage message rather than an
        // ArrayIndexOutOfBoundsException on args[0..2] below.
        if (args.length < 3) {
            System.err.println("usage: WordCount <localInput> <localOutput> <fraction>");
            System.exit(1);
        }

        JobConf conf = new JobConf (WordCount.class);
        DistributedFileSystem fs = ProjectMain.getDFS(conf);

        // Set up the DFS: stage the local input under "/", clear any stale
        // copies of the input and the previous job's output directory.
        Path root = new Path ("/");
        Path out = new Path ("out");
        Path dfsInput = new Path (args[0]);
        Path localOut = new Path (args[1]);
        conf.setWorkingDirectory(root);
        fs.setWorkingDirectory(root);
        fs.delete(dfsInput);
        fs.copyFromLocalFile(dfsInput, root);
        fs.delete(out);
        conf.setInputPath(dfsInput);
        conf.setOutputPath(out);

        // Compute the emit threshold and publish it on the DFS so reducer
        // tasks (separate JVMs) can read it via getIntFromFile().
        int totalWords = TotalWords.returnTotalWords(dfsInput);
        FSDataOutputStream fmc = fs.create(maxCountPath);
        try {
            fmc.writeInt((int) (totalWords * Double.parseDouble(args[2])));
        } finally {
            fmc.close();
        }
        int maxCount = getIntFromFile(maxCountPath, dfs);

        // Configure and run the job.
        conf.setJobName("Task II: Word Count");
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);
        conf.setMapperClass(WordCount.WCMap.class);
        conf.setCombinerClass(WordCount.WCCombine.class);
        conf.setReducerClass(WordCount.WCReduce.class);
        System.out.println("Total Words: " + totalWords);
        System.out.println(" Stop count: " + maxCount);
        JobClient.runJob(conf);

        // Move output back to the local FS, then clean up DFS scratch space.
        File outFile = new File (args[1]);
        outFile.delete();
        // If every word was filtered out the part file may not exist; create
        // an empty one so copyToLocalFile() does not fail.
        if (!fs.exists(new Path ("out/part-00000")))
        	fs.createNewFile(new Path ("out/part-00000"));
        fs.copyToLocalFile(new Path ("out/part-00000"), localOut);
        fs.delete(out);
        fs.delete(dfsInput);
        fs.delete(new Path ("/xxx"));
    }

    /**
     * Reads one int (written with {@code writeInt}) from a file on the DFS.
     *
     * @param filename DFS path to read from
     * @param dfs      file system handle
     * @return the stored int
     * @throws RuntimeException (with the {@link IOException} as its cause)
     *         on any read failure
     */
    public static int getIntFromFile(Path filename, DistributedFileSystem dfs) {
        DataInputStream in = null;
        try {
            in = dfs.open(filename);
            return in.readInt();
        } catch (IOException e) {
            throw new RuntimeException ("I/O: " + e, e);
        } finally {
            // The original leaked this stream; close it even on failure.
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // Best-effort close: the read result (or error) stands.
                }
            }
        }
    }
}
