import java.io.IOException;
import java.lang.InterruptedException;
import java.util.StringTokenizer;
import java.util.*;
import java.io.*;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import java.util.HashMap;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
import java.io.Serializable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.ArrayFile;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import java.net.URI;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.*;
//import org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Partitioner;


public class TrellisMP {
  static String input = "./input_sequence";
  static String output = "./input_prefix/prefixes";
  static String input_tmp = "./static/";
  static String output_tmp = "./output_tmp/";

  public static class PrefixCreationMapper
      extends Mapper<Object, Text, Text, IntWritable> {

      // Emits (prefix, 1) once per occurrence of each candidate prefix in the
      // input record, for aggregation by PrefixCreationReducer.

      private final static IntWritable one = new IntWritable(1);
      private Text prefixText = new Text();
      // Candidate prefixes, loaded once per task in setup(). The original code
      // streamed the MapFile inside map(), which exhausted the reader after the
      // first record and silently skipped every later input value — a bug.
      private List<String> prefixes;

      public void setup(Context context)
        throws IOException, InterruptedException {
        prefixes = new ArrayList<String>();
        MapFile.Reader reader = new MapFile.Reader(
            FileSystem.get(context.getConfiguration()), input_tmp,
            context.getConfiguration());
        try {
          Text seqText = new Text();
          IntWritable seqInt = new IntWritable();
          while (reader.next(seqText, seqInt)) {
            prefixes.add(seqText.toString());
          }
        } finally {
          reader.close(); // original leaked the reader (never closed)
        }
      }

      public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
        // Count every (possibly overlapping) occurrence of each prefix.
        for (String prefix : prefixes) {
          int i = value.find(prefix);
          while (i != -1) {
            prefixText.set(prefix);
            context.write(prefixText, one);
            i = value.find(prefix, i + 1);
          }
        }
      }
  }

  public static class PrefixCreationReducer
      extends Reducer<Text, IntWritable, Text, IntWritable> {

      // Sums the per-mapper partial counts for each prefix. Also installed as
      // the combiner in main(), so it must be associative — plain addition is.

      IntWritable sum = new IntWritable(0);

      public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
        // Accumulate locally, then publish through the reusable Writable.
        int total = 0;
        for (IntWritable partial : values) {
          total += partial.get();
        }
        sum.set(total);
        context.write(key, sum);
      }
  }

  public static class SuffixTreeMapper
      extends Mapper<LongWritable, Text, SpecialK, SuffixTree> {

      // Builds one partial suffix tree per (input split, accepted prefix) and
      // emits it keyed by (prefix, global offset) for merging in the reducer.

      /** Conf key holding the total input-file length; set in main(). */
      public static final String FILE_LENGTH =
          "mapreduce.input.suffixtreemapper.file_length";

      long fileLength;
      private ArrayList<String> prefixes;
      private int max;          // length of the longest accepted prefix
      private SpecialK outputKey;

      public void setup(Context context)
        throws IOException, InterruptedException {
        // 0 default is unchecked: a missing FILE_LENGTH silently becomes 0 —
        // main() always sets it, but downstream SuffixTree behavior for 0 is
        // not visible here. TODO confirm.
        fileLength = context.getConfiguration().getLong(SuffixTreeMapper.FILE_LENGTH, 0);

        // Load the accepted prefixes produced by the prefix-creation phase.
        // The original swallowed all exceptions here with a println, leaving
        // the mapper half-initialized; failures now propagate and fail the task.
        prefixes = new ArrayList<String>();
        SequenceFile.Reader reader = new SequenceFile.Reader(
            FileSystem.get(context.getConfiguration()),
            new Path("./map_final.ser"), context.getConfiguration());
        try {
          Text seqText = new Text();
          while (reader.next(seqText)) { // keys only: the prefix strings
            prefixes.add(seqText.toString());
          }
        } finally {
          reader.close(); // original leaked this reader
        }

        max = 0;
        for (String prefix : prefixes) {
          if (prefix.length() > max) {
            max = prefix.length();
          }
        }
        System.out.println("maximum prefix length is: " + max);

        outputKey = new SpecialK();
      }

      public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
        System.out.println("new MAPPER with global position: " + key + "  " + prefixes.size());

        for (String prefix : prefixes) {
          SuffixTree pst = new SuffixTree(value, max, prefix, key.get(), fileLength);
          if (!pst.test()) {
            // Self-check failed: skip this tree but keep processing the rest.
            System.out.println("Suffix Tree FAIL with prefix " + prefix);
          } else if (pst.getLeavesCount() > 0) {
            // NOTE(review): prefixes are parsed as longs, so the alphabet is
            // assumed to be purely numeric ('1'..'4' per main()) — confirm.
            outputKey.setPrefix(Long.parseLong(pst.getPrefix()));
            outputKey.setOffset(pst.getGlobalOffset());
            context.write(outputKey, pst);
          }
        }
      }
  }
  
  public static class SuffixTreeReducer
      extends Reducer<SpecialK, SuffixTree, Text, SuffixTree> {

      // Merges the partial suffix trees emitted for one prefix (grouped by
      // SpecialK.GroupComparator) into a single tree, written to the named
      // output matching the prefix string.

      private Path[] localFiles = new Path[0];
      private MultipleOutputs mos;
      private Braf mainBraf;   // buffered random access over the cached input sequence
      private Braf tmpBraf;
      private Text outputKeyReducer;

      public void setup(Context context)
        throws IOException, InterruptedException {
        // The original caught IOException here and only printed it, leaving
        // mos/mainBraf/tmpBraf null and guaranteeing an uninformative NPE in
        // reduce(); initialization failures now fail the task directly.
        localFiles = DistributedCache.getLocalCacheFiles(context.getConfiguration());
        mos = new MultipleOutputs(context);
        mainBraf = new Braf(localFiles[0].toString(), "r", 128*1024);
        tmpBraf = new Braf(localFiles[0].toString(), "r", 128*1024);
        outputKeyReducer = new Text();
      }

      public void reduce(SpecialK key, Iterable<SuffixTree> values, Context context)
        throws IOException, InterruptedException {
        System.out.println("NEW REDUCER: " + key);

        // Fold all partial trees for this prefix into `stable`; the first one
        // seeds the merge, the rest are merged in secondary-sort order.
        SuffixTree stable = null;
        int counter = 0;
        for (SuffixTree value : values) {
          System.out.println("Merging in " + value.getPrefix() + " " + value.getGlobalOffset());
          counter += value.getLeavesCount();
          if (stable == null) {
            stable = new SuffixTree(value);
          } else {
            Merger.mergeWith(stable, value, mainBraf, tmpBraf);
          }
        }
        if (stable == null) {
          return; // defensive: no values for this key (original would NPE)
        }
        stable.setLeavesCount(counter);
        System.out.println("Merging done for " + stable.getPrefix());

        if (!stable.test()) {
          System.out.println("Merged Suffix Tree FAIL with prefix " + key);
        }
        // Single debug dump — the original dumped twice on a failed self-check.
        stable.dumpEdges2();

        outputKeyReducer.set(stable.getPrefix());
        mos.write(stable.getPrefix(), outputKeyReducer, stable);
      }

      public void cleanup(Context context) throws IOException {
        try {
          mos.close();
        } catch (Exception e) {
          System.out.println("EXCEPTION in mos PC " + e.getMessage());
        }
        // NOTE(review): mainBraf/tmpBraf are never released; if Braf exposes a
        // close()/release method it should be called here — confirm against
        // Braf's API (class not visible in this file).
      }
  }


  public static void main(String[] args) throws Exception {

    // Driver. Phase 1 (optional, args[0]==1): iteratively grow prefixes of the
    // {1,2,3,4} alphabet until every surviving prefix's frequency is in
    // (0, threshold); survivors are persisted to ./map_final.ser.
    // Phase 2: build and merge partial suffix trees per accepted prefix.
    // args: PrefixCreation(1/0) threshold mappersPC mappersSTC
    System.out.println("USE: PrefixCreation(1/0) threshold mappersPC mappersSTC");
    Configuration conf = new Configuration();

    int length = 4; // initial length of prefix
    // Frequency threshold that filters the list of prefixes.
    IntWritable threshold = new IntWritable(Integer.parseInt(args[1]));
    int mappersPC = Integer.parseInt(args[2]);
    int mappersSTC = Integer.parseInt(args[3]);

    char[] alphabet = {'1','2','3','4'};

    boolean isDeleted = FileSystem.get(conf).delete(new Path(input_tmp), true);
    isDeleted = FileSystem.get(conf).delete(new Path(output_tmp), true);
    // All the prefixes that the next job must check in the input sequence.
    MapFile.Writer writer = new MapFile.Writer(conf, FileSystem.get(conf), input_tmp,
        Text.class, IntWritable.class);

    IntWritable zero = new IntWritable(0);
    // Seed with every prefix of length 4 over the alphabet.
    for (char a0: alphabet)
      for (char a1: alphabet)
        for (char a2: alphabet)
          for (char a3: alphabet)
            writer.append(new Text(new String(new char[] {a0,a1,a2,a3})), zero);
    writer.close();

    long sizeOfInput = FileSystem.get(conf).getFileStatus(
        new Path("./input_sequence/input")).getLen();

    // BUG FIX: the log previously divided by args[1] (the threshold) although
    // the actual split size below divides by args[2] (mappersPC).
    System.out.println("splitSize: " + sizeOfInput / mappersPC);
    if (Integer.parseInt(args[0]) == 1) {
      // HashMap essential since a MapFile can't handle our keys; holds all the
      // prefixes that stay under the threshold for the next stage.
      HashMap<String, IntWritable> map_final = new HashMap<String, IntWritable>();
      while (true) {
        // BUG FIX: the cast previously bound to sizeOfInput alone
        // ("(int) sizeOfInput / n"), truncating inputs larger than 2 GB
        // BEFORE the division; the division must happen in long arithmetic.
        conf.setInt(FixedLengthInputFormat.FIXED_SPLIT_LENGTH, (int) (sizeOfInput / mappersPC));
        // Extra bytes each split reads past its end: the longest current prefix.
        conf.setInt(FixedLengthInputFormat.FIXED_EXTRA_LENGTH, length);
        Job job = new Job(conf);
        job.setJobName("Prefix Creation for length " + length);
        job.setJarByClass(TrellisMP.class);
        job.setMapperClass(PrefixCreationMapper.class);

        job.setReducerClass(PrefixCreationReducer.class);
        job.setCombinerClass(PrefixCreationReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setNumReduceTasks(1);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setOutputFormatClass(MapFileOutputFormat.class);
        job.setInputFormatClass(FixedLengthInputFormat.class);
        FileInputFormat.addInputPath(job, new Path(input));
        SequenceFileOutputFormat.setOutputPath(job, new Path(output_tmp));

        job.waitForCompletion(true);

        MapFile.Reader reader = new MapFile.Reader(FileSystem.get(conf), output_tmp, conf);
        isDeleted = FileSystem.get(conf).delete(new Path(input_tmp), true);
        // Prefixes the mapper will check in the next round.
        MapFile.Writer mfstatic = new MapFile.Writer(conf, FileSystem.get(conf), input_tmp,
            Text.class, IntWritable.class);

        Text seqText = new Text();
        IntWritable seqInt = new IntWritable();

        try {
          boolean breaker = true;
          // Loop through all prefix-frequency pairs of the last job.
          while (reader.next(seqText, seqInt)) {
            // Accepted iff 0 < frequency < threshold: compareTo(zero) is +1 and
            // compareTo(threshold) is -1, so the two signs cancel to 0.
            if ((seqInt.compareTo(threshold)) + (seqInt.compareTo(zero)) == 0) {
              map_final.put(seqText.toString(), new IntWritable(seqInt.get()));
            } else {
              breaker = false;
              // Frequency >= threshold: expand into 4 prefixes of length+1.
              for (char a: alphabet) {
                Text tmp = new Text(seqText);
                tmp.append((Character.toString(a)).getBytes(), 0, 1);
                mfstatic.append(tmp, zero);
              }
            }
          }
          if (breaker) { // every prefix was below threshold or zero: phase over
            int keycounter = 0;
            int valuecounter = 0;
            for (String prefix: map_final.keySet()) {
              keycounter++;
              valuecounter += map_final.get(prefix).get();
            }
            System.out.println("#prefixes: " + keycounter + ", total frequency: " + valuecounter);
            break;
          }
        } finally {
          reader.close();
          mfstatic.close();
          isDeleted = FileSystem.get(conf).delete(new Path(output_tmp), true);
        }
        length++;
      }

      // Persist the accepted prefixes for the suffix-tree phase.
      SequenceFile.Writer writer22 = new SequenceFile.Writer(FileSystem.get(conf), conf,
          new Path("./map_final.ser"), Text.class, IntWritable.class);
      try {
        Text newkey = new Text();
        for (String key: map_final.keySet()) {
          newkey.set(key);
          writer22.append(newkey, map_final.get(key));
        }
      } finally {
        writer22.close();
      }
    }

    // Phase 2: suffix-tree construction. Same long-division fix as above.
    conf.setInt(FixedLengthInputFormat.FIXED_SPLIT_LENGTH, (int) (sizeOfInput / mappersSTC));
    conf.setInt(FixedLengthInputFormat.FIXED_EXTRA_LENGTH, length);
    conf.setLong(SuffixTreeMapper.FILE_LENGTH, sizeOfInput);
    isDeleted = FileSystem.get(conf).delete(new Path("null_output"), true);
    Job job = new Job(conf);
    job.setJobName("SuffixTreeCreation");
    job.setJarByClass(TrellisMP.class);
    job.setMapperClass(SuffixTreeMapper.class);
    job.setReducerClass(SuffixTreeReducer.class);

    // Ship the raw input sequence to reducers for Braf random access.
    DistributedCache.addCacheFile(new Path("input_sequence/input").toUri(), job.getConfiguration());
    job.setInputFormatClass(FixedLengthInputFormat.class);

    // One named output per accepted prefix.
    SequenceFile.Reader preader = new SequenceFile.Reader(FileSystem.get(conf),
        new Path("./map_final.ser"), conf);
    try {
      Text seqText = new Text();
      while (preader.next(seqText)) {
        MultipleOutputs.addNamedOutput(job, seqText.toString(),
            SequenceFileOutputFormat.class, Text.class, SuffixTree.class);
      }
    } finally {
      preader.close();
    }

    // Group by prefix, sort by (prefix, offset) so merges happen in file order.
    job.setGroupingComparatorClass(SpecialK.GroupComparator.class);
    job.setSortComparatorClass(SpecialK.SortComparator.class);

    job.setProfileEnabled(true);

    job.setMapOutputKeyClass(SpecialK.class);
    job.setMapOutputValueClass(SuffixTree.class);
    FileInputFormat.addInputPath(job, new Path("./input_sequence/"));
    FileOutputFormat.setOutputPath(job, new Path("./null_output/"));
    job.waitForCompletion(true);

    isDeleted = FileSystem.get(conf).delete(new Path(input_tmp), true);
  }
}
