package skewreduce.dfof;

import java.io.IOException;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import skewreduce.framework.WritableInputFormat;
import skewreduce.framework.WritableOutputFormat;
import skewreduce.lib.PhysicsParticle;
import skewreduce.lib.Point3D;



/**
 * MapReduce job that draws a uniform random subsample from a binary
 * particle data set.  Each input record is independently kept with
 * probability {@code samplerate} (default 0.125) and written back out
 * unchanged; the job is map-only unless {@code -merge} is given.
 *
 * Usage:
 *   Subsample -input &lt;path&gt; -output &lt;path&gt; (-point | -particle)
 *             [-sample &lt;rate&gt;] [-merge]
 */
public class Subsample {
    /** Binary input format for (long id, Point3D) records. */
    public static class ParticleInputFormat
    extends WritableInputFormat<LongWritable,Point3D> {
        // Single key instance reused across records; assumes the framework
        // consumes/serializes the key before the next read -- TODO confirm
        // against WritableInputFormat.
        private LongWritable key = new LongWritable();

        public LongWritable createKey() { return key; }
        public Point3D createValue() { return new Point3D(); }
        public int getKeySize() { return 8; }     // sizeof(long)
        public int getValueSize() { return 12; }  // presumably 3 x 4-byte coords -- verify against Point3D
    }

    /** Binary input format for (long id, PhysicsParticle) records. */
    public static class PhysicsParticleInputFormat
    extends WritableInputFormat<LongWritable,PhysicsParticle> {
        private LongWritable key = new LongWritable();

        public LongWritable createKey() { return key; }
        public PhysicsParticle createValue() { return new PhysicsParticle(); }
        public int getKeySize() { return 8; }     // sizeof(long)
        public int getValueSize() { return 32; }  // fixed record size -- verify against PhysicsParticle
    }

    /** Binary output format matching {@link ParticleInputFormat}. */
    public static class Point3DOutputFormat
    extends WritableOutputFormat<LongWritable,Point3D> {}

    /** Binary output format matching {@link PhysicsParticleInputFormat}. */
    public static class PhysicsParticleOutputFormat
    extends WritableOutputFormat<LongWritable,PhysicsParticle> {}

    /**
     * Identity mapper that forwards each record with probability
     * {@code samplerate} (read from the job configuration, default 0.125)
     * and drops it otherwise.
     *
     * @param <V> record value type (Point3D or PhysicsParticle)
     */
    public static class SampleMapper<V>
    extends Mapper<LongWritable,V,LongWritable,V> {
        private float sampleRate;
        private Random random;

        @Override
        protected void setup(Context context)
        throws InterruptedException,IOException {
            super.setup(context);
            Configuration conf = context.getConfiguration();
            sampleRate = conf.getFloat("samplerate",0.125f);
            random = new Random();  // unseeded: each map task samples independently
        }

        @Override
        protected void map(LongWritable key, V value, Context context)
        throws IOException, InterruptedException {
            // nextFloat() is uniform on [0,1), so each record survives with
            // probability sampleRate.
            if ( random.nextFloat() < sampleRate )
                context.write( key, value);
        }
    }

    /**
     * Parses command-line options, configures the job, and runs it.
     * Exits with 0 on success, -1 on bad arguments or job failure.
     */
    public static void main(String[] args) throws Exception {
        GenericOptionsParser options = new GenericOptionsParser(new Configuration(),args);
        Configuration conf = options.getConfiguration();
        // FIX: iterate over the arguments remaining AFTER generic-option
        // parsing.  The original looped over the raw args[], so generic
        // options such as "-D key=value" (already consumed by
        // GenericOptionsParser) were misreported as "Unknown option".
        String[] rest = options.getRemainingArgs();
        boolean bad = false;
        boolean formatChosen = false;

        Job job = createJob(conf);
        Configuration jobConf = job.getConfiguration();

        for ( int i = 0; i < rest.length; ++i ) {
            if ( "-input".equals(rest[i]) ) {
                if ( i+1 < rest.length ) {
                    FileInputFormat.addInputPath(job,new Path(rest[++i]));
                } else {
                    System.err.println("missing -input parameter");
                    bad = true;
                }
            } else if ( "-output".equals(rest[i]) ) {
                if ( i+1 < rest.length ) {
                    FileOutputFormat.setOutputPath(job,new Path(rest[++i]));
                } else {
                    System.err.println("missing -output parameter");
                    bad = true;
                }
            } else if ( "-sample".equals(rest[i]) ) {
                if ( i+1 < rest.length ) {
                    // FIX: report a malformed rate instead of letting the
                    // NumberFormatException escape as a raw stack trace.
                    try {
                        jobConf.setFloat("samplerate",Float.parseFloat(rest[++i]));
                    } catch ( NumberFormatException e ) {
                        System.err.println("invalid -sample rate: "+rest[i]);
                        bad = true;
                    }
                } else {
                    System.err.println("missing -sample parameter");
                    bad = true;
                }
            } else if ( "-merge".equals(rest[i]) ) {
                // Funnel all sampled records through one identity reducer so
                // the sample lands in a single output file.
                job.setNumReduceTasks(1);
                job.setReducerClass(Reducer.class);
            } else if ( "-point".equals(rest[i]) ) {
                job.setInputFormatClass(ParticleInputFormat.class);
                job.setOutputFormatClass(Point3DOutputFormat.class);
                job.setOutputValueClass(Point3D.class);
                formatChosen = true;
            } else if ( "-particle".equals(rest[i]) ) {
                job.setInputFormatClass(PhysicsParticleInputFormat.class);
                job.setOutputFormatClass(PhysicsParticleOutputFormat.class);
                job.setOutputValueClass(PhysicsParticle.class);
                formatChosen = true;
            } else {
                System.err.println("Unknown option: "+rest[i]);
            }
        }

        if ( !formatChosen ) {
            // FIX: without -point/-particle the job would silently fall back
            // to Hadoop's default text formats and mangle the binary input.
            System.err.println("one of -point or -particle is required");
            bad = true;
        }

        if ( bad ) {
            System.exit(-1);
        }

        // waitForCompletion() submits the job itself when it is still in the
        // DEFINE state; the original's separate submit() call was redundant.
        System.exit( job.waitForCompletion(true) ? 0 : -1 );
    }

    /**
     * Builds the common job skeleton: sampling mapper, LongWritable output
     * keys, and map-only execution (no reducers unless -merge overrides it).
     *
     * @param conf base configuration (generic options already applied)
     * @return a configured but not yet submitted Job
     * @throws IOException if the job cannot be created
     */
    protected static Job createJob(Configuration conf) throws IOException {
        // NOTE(review): new Job(conf) is deprecated in later Hadoop releases
        // in favor of Job.getInstance(conf); kept here for compatibility with
        // the Hadoop version this project appears to build against.
        Job job = new Job(conf);

        job.setJarByClass(Subsample.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setMapperClass(SampleMapper.class);
        job.setNumReduceTasks(0);  // map-only by default

        return job;
    }
}
