package skewreduce.seaflow;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;

import skewreduce.framework.WritableInputFormat;
import skewreduce.framework.WritableOutputFormat;
import skewreduce.framework.physical.PPartitionOp;
import skewreduce.lib.Cube2;
import skewreduce.lib.Partition;
import skewreduce.lib.PartitionInfo;
import skewreduce.lib.Point3D;




/**
 * Partitioning stage of the Seaflow friends-of-friends (FoF) computation.
 * Configures a MapReduce job that reads fixed-width binary particle records
 * and writes them back out as (id, position) pairs, keyed for spatial
 * partitioning by the surrounding {@link PPartitionOp} framework.
 */
public class SeaflowFoFPartition extends PPartitionOp {
    private static final Log LOG = LogFactory.getLog(SeaflowFoFPartition.class);

    /**
     * Fixed-width binary input format for particle records: an 8-byte
     * {@link LongWritable} id followed by a 12-byte {@link Point3D} position
     * (presumably 3 x 4-byte coordinates — TODO(review): confirm against
     * Point3D's serialized layout).
     */
    public static class ParticleInputFormat
    extends WritableInputFormat<LongWritable,Point3D> {
        // Single key instance reused across records (standard Hadoop
        // object-reuse pattern); values are allocated fresh per call.
        private LongWritable key = new LongWritable();

        public LongWritable createKey() { return key; }
        public Point3D createValue() { return new Point3D(); }
        public int getKeySize() { return 8; }     // one serialized long
        public int getValueSize() { return 12; }  // fixed-width Point3D record
    }

    /** Writes (id, position) pairs in the framework's binary layout. */
    public static class Point3DOutputFormat
    extends WritableOutputFormat<LongWritable,Point3D> {}

    /**
     * Partitioner over (id, position) tuples. NOTE(review): this class is
     * declared but never set on the job below — verify that the PPartitionOp
     * driver wires it (or an equivalent) before the job is submitted.
     */
    public static class CubePartitioner
    extends TuplePartitioner<LongWritable,Point3D> {}

    /**
     * Creates the partitioning job with binary particle I/O configured.
     * Mapper, reducer, reduce-task count, and partitioner are NOT set here;
     * presumably the PPartitionOp base class fills those in — TODO(review):
     * confirm before running this job standalone.
     *
     * @param conf cluster configuration copied into the new job
     * @return the configured (but not yet submitted) job
     * @throws IOException if job construction fails
     */
    protected Job createJob(Configuration conf) throws IOException {
        // NOTE(review): new Job(conf) is deprecated in Hadoop 2.x in favor of
        // Job.getInstance(conf); kept as-is since the target Hadoop version
        // is not visible here.
        Job job = new Job(conf);

        job.setJarByClass(SeaflowFoFPartition.class);

        job.setInputFormatClass(ParticleInputFormat.class);
        job.setOutputFormatClass(Point3DOutputFormat.class);

        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Point3D.class);

        return job;
    }
}
