package skewreduce.dfof;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import skewreduce.framework.WritableInputFormat;
import skewreduce.framework.WritableOutputFormat;
import skewreduce.framework.physical.PhysicalOp;
import skewreduce.framework.physical.PPartitionOp.TuplePartitioner;
import skewreduce.lib.Cube2;
import skewreduce.lib.Partition;
import skewreduce.lib.PartitionInfo;
import skewreduce.lib.PhysicsParticle;
import skewreduce.lib.Point3D;



public class PartitionData extends Configured implements Tool {
	
    /**
     * Fixed-size binary input format for particle files: every record is an
     * 8-byte {@link LongWritable} key followed by a 32-byte
     * {@link PhysicsParticle} payload. Record-reading mechanics are inherited
     * from {@code WritableInputFormat}.
     */
    public static class ParticleInputFormat
    extends WritableInputFormat<LongWritable,PhysicsParticle> {
        // Single key instance reused across records to avoid per-record allocation.
        private final LongWritable reusableKey = new LongWritable();

        public int getKeySize() { return 8; }
        public int getValueSize() { return 32; }

        public LongWritable createKey() { return reusableKey; }
        public PhysicsParticle createValue() { return new PhysicsParticle(); }
    }

    /**
     * Output format for (LongWritable, PhysicsParticle) records; all writing
     * behavior is inherited unchanged from {@code WritableOutputFormat}.
     */
    public static class ParticleOutputFormat 
    extends WritableOutputFormat<LongWritable,PhysicsParticle> {}

    /**
     * Routes each (key, particle) tuple to a reduce task. The routing logic is
     * inherited unchanged from {@code TuplePartitioner}; presumably it consults
     * the partition plan registered via PARTITION_SPEC_FILE_ATTR — confirm in
     * the framework source.
     */
    public static class CubePartitioner
    extends TuplePartitioner<LongWritable,PhysicsParticle> {}
    
    /**
     * Appends {@code fn} to the comma-separated "tmpfiles" configuration entry
     * so the file is shipped to the cluster alongside the job.
     *
     * @param fn path of the file to ship
     */
    private void addPartitionFile(String fn) {
    	String existing = getConf().get("tmpfiles");
    	boolean empty = (existing == null || existing.isEmpty());
    	String merged = empty ? fn : existing + "," + fn;
    	getConf().set("tmpfiles", merged);
    }

    /**
     * Parses command-line options, obtains a spatial partition plan (either a
     * freshly generated uniform plan via {@code -uniform}, or one loaded from
     * local disk via {@code -plan}), then runs an identity map/reduce job whose
     * {@link CubePartitioner} routes each particle to the reduce task — and
     * hence the output file — of its spatial partition.
     *
     * @param args options ({@code -output}, {@code -plan}, {@code -uniform})
     *             followed by one or more input paths
     * @return 0 on success, non-zero on bad arguments, missing plan file, or
     *         job failure
     * @throws Exception on configuration or job-submission errors
     */
    @Override
    public int run(String[] args) throws Exception {
		boolean bad = false;
		int i = 0;
		String output = null;
		Path planFile = null;
		int uniformPart = 0;
		int numPartitions = 0;

		// Consume leading '-' options; everything after them is an input path.
		for ( ; i < args.length; ++i ) {
			if ( args[i].charAt(0) != '-' )
				break;

			if ( "-output".equals(args[i]) ) {
				output = args[++i];
			} else if ( "-plan".equals(args[i]) ) {
				planFile = new Path(args[++i]);
			} else if ( "-uniform".equals(args[i]) ) {
				uniformPart = Integer.parseInt(args[++i]);
			} else {
				System.err.println("Unknown option: "+args[i]);
				bad = true;
			}
		}

		if ( bad ) {
			printUsage();
			// Return the error code per the Tool contract (ToolRunner's caller
			// exits with it) instead of calling System.exit() from run().
			return 1;
		}
		if ( output == null ) {
			// FIX: -output was never validated; omitting it previously caused a
			// NullPointerException at new Path(output) during job setup.
			System.err.println("Missing required option: -output");
			printUsage();
			return 1;
		}

        if ( uniformPart > 0 ) {
        	// Generate a uniform partition plan: split the unit cube to a fixed depth.
        	Cube2 root = new Cube2(0,1.0f);
        	planFile = new Path(String.format("uniformPartitionInfo-%d",uniformPart));

        	// depth = floor(log2(uniformPart)); numPartitions = 2^depth.
        	int depth = 0;
        	numPartitions = 1;
        	while ( uniformPart > 1 ) {
        		uniformPart >>>= 1;
        		numPartitions <<= 1;
        		++depth;
        	}

        	System.out.println("Depth = "+depth+"; # partitions = "+numPartitions);

        	root.uniformSplit(new Partition.DepthCondition(depth));

        	LocalFileSystem fs = FileSystem.getLocal(getConf());
        	FSDataOutputStream out = fs.create(planFile, true);
        	try {
        		root.write(out);
        	} finally {
        		// Close even if write() throws so the local file is not leaked.
        		out.close();
        	}
        } else if ( planFile != null ) {
        	LocalFileSystem fs = FileSystem.getLocal(getConf());
        	if ( ! fs.exists(planFile) ) {
        		// FIX: this check previously had an EMPTY body, so a missing
        		// plan file was silently ignored and surfaced later as an
        		// obscure failure from fs.open(). Fail fast instead.
        		System.err.println("Partition plan file does not exist: "+planFile);
        		return 1;
        	}
        	// Load the plan and derive the number of partitions from it.
            Partition root = new Cube2();
            FSDataInputStream fileIn = fs.open(planFile);
            try {
            	root.readFields(fileIn);
            } finally {
            	fileIn.close();
            }

            PartitionInfo partitions = new PartitionInfo(root);
            numPartitions = partitions.size();
        } else {
        	// Neither -uniform nor -plan given: no plan to partition by.
        	printUsage();
        	return 1;
        }

        // Register the plan with the framework and ship it to the cluster.
        getConf().set(PhysicalOp.PARTITION_SPEC_FILE_ATTR,planFile.getName());
    	addPartitionFile(planFile.makeQualified(FileSystem.getLocal(getConf())).toString());
        getConf().setClass(PhysicalOp.PARTITION_CLASS_ATTR, Cube2.class, Partition.class);

        Job job = new Job(getConf(),"Partition-"+planFile.getName());

        job.setJarByClass(PartitionData.class);

        job.setInputFormatClass(ParticleInputFormat.class);
        job.setOutputFormatClass(ParticleOutputFormat.class);

        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(PhysicsParticle.class);

        // Identity mapper/reducer: the partitioner does the actual routing,
        // one reduce task per spatial partition.
        job.setMapperClass(Mapper.class);
        job.setReducerClass(Reducer.class);
        job.setPartitionerClass(CubePartitioner.class);
        job.setNumReduceTasks(numPartitions);

        // Remaining arguments are input paths.
        for ( ; i < args.length; ++i ) {
            FileInputFormat.addInputPaths(job,args[i]);
        }
        FileOutputFormat.setOutputPath(job, new Path(output));

        // FIX: removed the redundant job.submit() that preceded this call —
        // waitForCompletion(true) submits the job itself when still unsubmitted.
        return job.waitForCompletion(true) ? 0 : -1;
    }
    
	/** Prints the command-line synopsis to standard output. */
	private void printUsage() {
    	// FIX: formatting PartitionData.class directly printed
    	// "class skewreduce.dfof.PartitionData"; use getName() for a clean name.
    	System.out.printf("Usage: %s [-plan planFile | -uniform numPartitions] -output outputDir INPUT FILES\n",
    			PartitionData.class.getName());
	}

	/**
	 * Entry point: delegates to {@code ToolRunner} so generic Hadoop options
	 * are handled before {@link #run} executes, then exits with run's status.
	 */
	public static void main(String[] args) throws Exception {
		PartitionData tool = new PartitionData();
		System.exit(ToolRunner.run(tool, args));
	}
}
