package org.sss.mapreduce.example;

import java.util.StringTokenizer;

import org.sss.client.JobEngine;
import org.sss.client.SssClient;
import org.sss.mapreduce.GroupID;
import org.sss.mapreduce.Mapper;
import org.sss.mapreduce.Output;
import org.sss.mapreduce.Reducer;
import org.sss.mapreduce.SssException;
import org.sss.mapreduce.datatype.PackableInt;
import org.sss.mapreduce.datatype.PackableString;

/**
 * Classic word-count example for the sss MapReduce framework: the mapper
 * emits {@code (word, 1)} for every whitespace-separated token of each input
 * line, and the reducer sums the counts per word.
 *
 * <p>Usage: {@code WordCount <input-group-id>} (after any framework options
 * consumed by {@link SssClient}).
 */
public class WordCount {
  /** Tokenizes each input value on whitespace and emits a count of 1 per token. */
  public static class WordCountMapper extends Mapper {
    /**
     * Splits {@code value} into whitespace-separated tokens and writes
     * {@code (token, 1)} for each one.
     *
     * @param context framework callback context (unused here)
     * @param key     input record key; ignored — presumably a record
     *                offset/index (TODO confirm against the framework docs)
     * @param value   one input record's text
     * @param output  sink receiving the {@code (word, 1)} pairs
     * @throws Exception propagated from {@code output.write}
     */
    public void map(Context context,
        PackableInt key, PackableString value,
        Output<PackableString, PackableInt> output) throws Exception {
      // StringTokenizer's default delimiters split on any whitespace run.
      StringTokenizer it = new StringTokenizer(value.get());
      while (it.hasMoreTokens()) {
        output.write(new PackableString(it.nextToken()), new PackableInt(1));
      }
    }
  }

  /** Sums the per-word counts emitted by {@link WordCountMapper}. */
  public static class WordCountReducer extends Reducer {
    /**
     * Writes {@code (key, sum(values))} for one word.
     *
     * @param context framework callback context (unused here)
     * @param key     the word being reduced
     * @param values  all counts emitted for {@code key}
     * @param output  sink receiving the single aggregated pair
     * @throws Exception propagated from {@code output.write}
     */
    public void reduce(Context context,
        PackableString key, Iterable<PackableInt> values,
        Output<PackableString, PackableInt> output) throws Exception {
      int sum = 0;
      for (PackableInt v : values) {
        sum += v.get();
      }
      output.write(key, new PackableInt(sum));
    }
  }

  /**
   * Builds and runs the two-stage word-count pipeline: a mapper job feeding a
   * randomly-named intermediate group, then a reducer job producing the final
   * output group, whose id is printed on success.
   *
   * @param args framework options followed by the input group id
   * @throws SssException on framework/client failure
   */
  public static void main(String[] args) throws SssException {
    SssClient client = new SssClient(args);

    String[] others = client.getArgs();
    // Fix: report a usage error instead of crashing with
    // ArrayIndexOutOfBoundsException when the input group id is missing.
    if (others.length < 1) {
      System.err.println("usage: WordCount <input-group-id>");
      return;
    }
    GroupID input = GroupID.createFromString(others[0]);

    JobEngine engine = new JobEngine(client);
    try {
      GroupID mapOutput    = GroupID.createRandom(engine);
      GroupID reduceOutput = GroupID.createRandom(engine);

      engine.getJobBuilder("mapper", WordCountMapper.class)
        .addInput(input).addOutput(mapOutput, WordCountReducer.class).build();

      engine.getJobBuilder("reducer", WordCountReducer.class)
        .addInput(mapOutput).addOutput(reduceOutput).build();

      engine.exec();

      System.out.println("output data deployed - " + reduceOutput);
    } finally {
      // Always release engine resources, even if job setup or exec fails.
      engine.dispose();
    }
  }
}
