//package edu.cmu.ml.rtw.users.matt.randomwalks;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;
import org.apache.mahout.common.IntPairWritable;

//import edu.cmu.ml.rtw.util.Pair;

/**
 * Takes the walk file generated by GraphChiWalk and, for each source node, produces a
 * conditional probability distribution over the nodes reached later in the same walks
 * (emitted as one "{target: prob, ...}" line per source node).
 */
public class ConditionalProb extends Configured implements Tool {
    private final static Logger log = Logger.getLogger(ConditionalProb.class);

    // Hadoop job counters reported by the mappers/reducers below.
    public static enum COUNTERS {
        TOO_MANY_HOPS,       // walk ids with >100 steps, skipped by WalkIdReducer
        HOP_1,               // HOP_n: histogram of walk lengths seen by NodePairMapper
        HOP_2,
        HOP_3,
        HOP_4,
        HOP_5,
        HOP_6,
        HOP_7,
        HOP_8,
        HOP_9,
        HOP_10,
        HOP_11,
        HOP_12,
        HOP_13,
        HOP_14,
        HOP_15,
        MORE_THAN_15_HOPS,   // bucket for any walk longer than 15 hops
        NODE_PAIR,           // one increment per source node seen by NormalizeReducer
        NO_KEPT_PATHS        // source nodes whose every target fell below MIN_COUNT
    };

    /**
     * Identity mapper over the walk file: passes each (walk id, pair) record
     * through unchanged so that all records for one walk id meet in the reducer.
     * (The pair is presumably (hop index, node id) — see WalkIdReducer's sort.)
     */
    public static class WalkFileMapper extends MapReduceBase implements
            Mapper<IntWritable, IntPairWritable, IntWritable, IntPairWritable> {

        @Override
        public void map(IntWritable walkId, IntPairWritable step,
                OutputCollector<IntWritable, IntPairWritable> collector,
                Reporter reporter) throws IOException {
            collector.collect(walkId, step);
        }
    }

    /**
     * Collects all steps belonging to one walk id, orders them (IntPairWritable
     * sorts by its first field, the hop index), and emits the ordered sequence of
     * visited node ids as a single IntArrayWritable.
     */
    public static class WalkIdReducer extends MapReduceBase implements
            Reducer<IntWritable, IntPairWritable, IntWritable, IntArrayWritable> {

        @Override
        public void reduce(IntWritable walk_id, Iterator<IntPairWritable> values,
                OutputCollector<IntWritable, IntArrayWritable> output, Reporter reporter)
                throws IOException {
            // Safety valve: no legitimate walk should have anywhere near 100 steps
            // (the earlier maxHops parameter was far smaller); this catches
            // pathological cases.
            final int maxSteps = 100;
            ArrayList<IntPairWritable> steps = new ArrayList<IntPairWritable>();
            while (values.hasNext()) {
                IntPairWritable step = values.next();
                // Hadoop reuses the value object across iterations, so copy it
                // before storing.
                IntPairWritable copy = new IntPairWritable();
                copy.set(step.getFirst(), step.getSecond());
                steps.add(copy);
                if (steps.size() > maxSteps) {
                    reporter.getCounter(COUNTERS.TOO_MANY_HOPS).increment(1);
                    log.warn("Found a walk id with " + steps.size() + " hops - skipping it");
                    return;
                }
            }
            Collections.sort(steps);

            // Keep only the visited node id (second field) from each ordered step.
            ArrayList<IntWritable> nodes = new ArrayList<IntWritable>();
            for (IntPairWritable step : steps) {
                nodes.add(new IntWritable(step.getSecond()));
            }

            output.collect(walk_id, IntArrayWritable.fromArrayList(nodes));
        }
    }

    /**
     * For each walk (an ordered list of visited node ids), emits every ordered
     * pair (earlier node, later node), keyed by the earlier node.  The reducer
     * then turns these co-occurrence counts into conditional probabilities.
     *
     * Fix: removed the unused {@code out_key}/{@code out_value} fields — the
     * latter was even typed {@code Text}, which is not this mapper's output
     * value type ({@code IntWritable}).
     */
    public static class NodePairMapper extends MapReduceBase implements
            Mapper<IntWritable, IntArrayWritable, IntWritable, IntWritable> {

        @Override
        public void map(IntWritable key, IntArrayWritable value,
                OutputCollector<IntWritable, IntWritable> output,
                Reporter reporter) throws IOException {
            ArrayList<IntWritable> nodes = value.toArrayList();
            // Record a walk-length histogram entry for this walk.
            reporter.getCounter(getHopCounter(nodes.size())).increment(1);
            // Every node reached after start_node counts as a (start, target) pair.
            for (int i = 0; i < nodes.size(); i++) {
                IntWritable start_node = nodes.get(i);
                for (int j = i + 1; j < nodes.size(); j++) {
                    output.collect(start_node, nodes.get(j));
                }
            }
        }
    }

    /**
     * For one source node, counts how often each target node was reached, drops
     * targets seen fewer than {@link #MIN_COUNT} times, normalizes the surviving
     * counts into probabilities, and emits them as a "{target: prob, ...}" string.
     * Sources with no surviving targets are counted in NO_KEPT_PATHS and skipped.
     *
     * Fix: the opening "{" was appended but the closing "}" never was, so the
     * emitted string was unbalanced; it is now properly terminated.
     */
    public static class NormalizeReducer extends MapReduceBase implements
            Reducer<IntWritable, IntWritable, IntWritable, Text> {

        // A target must be reached from the source at least this often to be kept.
        private static final int MIN_COUNT = 2;

        @Override
        public void reduce(IntWritable source, Iterator<IntWritable> targets,
                OutputCollector<IntWritable, Text> output,
                Reporter reporter) throws IOException {
            reporter.getCounter(COUNTERS.NODE_PAIR).increment(1);

            // Count occurrences of each target node for this source.
            Map<Integer, Integer> counter = new HashMap<Integer, Integer>();
            while (targets.hasNext()) {
                Integer target = targets.next().get();
                Integer count = counter.get(target);
                counter.put(target, count == null ? 1 : count + 1);
            }

            // Keep only frequent targets; `total` normalizes the kept counts.
            Map<Integer, Integer> distribution = new HashMap<Integer, Integer>();
            int total = 0;
            for (Map.Entry<Integer, Integer> entry : counter.entrySet()) {
                if (entry.getValue() >= MIN_COUNT) {
                    distribution.put(entry.getKey(), entry.getValue());
                    total += entry.getValue();
                }
            }

            if (distribution.size() == 0) {
                reporter.getCounter(COUNTERS.NO_KEPT_PATHS).increment(1);
                return;
            }

            StringBuilder builder = new StringBuilder("{");
            boolean first = true;
            for (Map.Entry<Integer, Integer> entry : distribution.entrySet()) {
                if (!first) {
                    builder.append(", ");
                }
                first = false;
                builder.append(entry.getKey());
                builder.append(": ");
                builder.append(((float) entry.getValue()) / total);
            }
            builder.append("}");  // BUG FIX: closing brace was previously missing.

            output.collect(source, new Text(builder.toString()));
        }
    }

    /**
     * Prints a placeholder usage message plus the generic Hadoop tool options.
     *
     * @return -1, so callers can return the result directly as an error code
     */
    static int printUsage() {
        System.out.println("Sorry, no usage information yet: TODO");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    /**
     * Maps a walk length to its histogram counter.  Lengths outside [1, 15]
     * (including 0 and negatives) share the MORE_THAN_15_HOPS bucket.
     */
    static COUNTERS getHopCounter(int size) {
        if (size < 1 || size > 15) {
            return COUNTERS.MORE_THAN_15_HOPS;
        }
        // Counter names follow the pattern HOP_<n> for n in [1, 15].
        return COUNTERS.valueOf("HOP_" + size);
    }

    /**
     * Runs the two-stage pipeline:
     * <ol>
     *   <li>Group walk steps by walk id and emit each walk's ordered node list.</li>
     *   <li>Emit (earlier, later) node pairs and normalize per-source counts into
     *       conditional probability distributions.</li>
     * </ol>
     *
     * Fix: removed the unused {@code tmp_dir2} local; decomposed the two job
     * configurations into private helpers (entry-point signature unchanged).
     *
     * @param args {input_dir, output_dir}
     * @return 0 on success, -1 on bad arguments
     */
    public int run(String[] args) throws Exception {
        if (args.length != 2) {
            System.out.println("ERROR: Wrong number of parameters: " + args.length
                    + " instead of 2.");
            return printUsage();
        }
        String input_dir = args[0];
        String output_dir = args[1];
        // Intermediate output of the first job, consumed by the second.
        String tmp_dir = "/tmp/mg1/walk_analyzer_intermediate/";

        JobClient.runJob(configureFirstJob(input_dir, tmp_dir));
        JobClient.runJob(configureSecondJob(tmp_dir, output_dir));
        return 0;
    }

    /** Configures job 1: walk file -> (walk id, ordered node list). */
    private JobConf configureFirstJob(String input_dir, String tmp_dir) {
        JobConf conf = new JobConf(getConf(), ConditionalProb.class);
        conf.setJobName("FirstConditionalProbMapReduce");

        conf.setInputFormat(WalkFileInputFormat.class);
        conf.setMapperClass(WalkFileMapper.class);
        conf.setMapOutputKeyClass(IntWritable.class);
        conf.setMapOutputValueClass(IntPairWritable.class);

        conf.setReducerClass(WalkIdReducer.class);
        conf.setOutputKeyClass(IntWritable.class);
        conf.setOutputValueClass(IntArrayWritable.class);
        conf.setOutputFormat(SequenceFileOutputFormat.class);

        setCommonJobSettings(conf, "-Xmx700M -Xss10M", "1300", "1300");

        FileInputFormat.setInputPaths(conf, new Path(input_dir));
        FileOutputFormat.setOutputPath(conf, new Path(tmp_dir));
        return conf;
    }

    /** Configures job 2: (walk id, node list) -> normalized target distributions. */
    private JobConf configureSecondJob(String tmp_dir, String output_dir) {
        JobConf conf = new JobConf(getConf(), ConditionalProb.class);
        conf.setJobName("SecondConditionalProbMapReduce");

        conf.setInputFormat(SequenceFileInputFormat.class);
        conf.setMapperClass(NodePairMapper.class);
        conf.setMapOutputKeyClass(IntWritable.class);
        conf.setMapOutputValueClass(IntWritable.class);

        conf.setReducerClass(NormalizeReducer.class);
        conf.setOutputKeyClass(IntWritable.class);
        conf.setOutputValueClass(Text.class);
        conf.setOutputFormat(TextOutputFormat.class);

        setCommonJobSettings(conf, "-Xmx1300M -Xss10M", "2000", "2040");

        FileInputFormat.setInputPaths(conf, new Path(tmp_dir));
        FileOutputFormat.setOutputPath(conf, new Path(output_dir));
        return conf;
    }

    /** Applies the retry/parallelism/memory settings shared by both jobs. */
    private static void setCommonJobSettings(JobConf conf, String childJavaOpts,
            String mapMemoryMb, String reduceMemoryMb) {
        conf.set("mapred.child.java.opts", childJavaOpts);
        conf.set("mapred.map.max.attempts", "20");
        conf.set("mapred.reduce.max.attempts", "20");
        conf.set("mapred.max.tracker.failures", "20");
        conf.set("mapred.reduce.tasks", "100");
        conf.set("mapred.job.map.memory.mb", mapMemoryMb);
        conf.set("mapred.job.reduce.memory.mb", reduceMemoryMb);
    }

    /** Command-line entry point; delegates argument parsing to {@link ToolRunner}. */
    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        int exitCode = ToolRunner.run(configuration, new ConditionalProb(), args);
        System.exit(exitCode);
    }

    /**
     * A typed {@link ArrayWritable} of {@link IntWritable} values, with helpers
     * to convert to and from an {@link ArrayList}.  The no-arg constructor is
     * required so Hadoop can instantiate it during deserialization.
     *
     * Fix: replaced the manual element-copy loop in {@code fromArrayList} with
     * {@code List.toArray}, and renamed the misleading {@code pair}/{@code
     * int_pair} locals in {@code toArrayList} (the elements are single ints, not
     * pairs — apparently a copy-paste artifact).
     */
    public static class IntArrayWritable extends ArrayWritable {
        public IntArrayWritable() {
            super(IntWritable.class);
        }

        public IntArrayWritable(IntWritable[] values) {
            super(IntWritable.class, values);
        }

        /** Wraps the contents of {@code array} in a new IntArrayWritable. */
        public static IntArrayWritable fromArrayList(ArrayList<IntWritable> array) {
            IntArrayWritable writable = new IntArrayWritable();
            writable.set(array.toArray(new IntWritable[array.size()]));
            return writable;
        }

        /** Returns the stored values as a new, independent ArrayList. */
        public ArrayList<IntWritable> toArrayList() {
            ArrayList<IntWritable> values = new ArrayList<IntWritable>();
            for (Writable value : this.get()) {
                values.add((IntWritable) value);
            }
            return values;
        }
    }

}
