package skewreduce.framework.planner;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import skewreduce.lib.Cube2;
import skewreduce.lib.Histogram;
import skewreduce.lib.ITuple;
import skewreduce.lib.KahanSum;
import skewreduce.lib.Partition;
import skewreduce.lib.Point3D;
import skewreduce.lib.Util;


public class Cube2PartitionerCollection {
	private static final Logger LOG = LoggerFactory.getLogger("skewreduce.Partition");
	
	static final float MIN_LENGTH_FACTOR = 4.0f;
	
	static abstract class Cube2Partitioner implements Partitioner {

		protected Configuration conf;
		protected float eps;
		protected EaggCostModel model;

		@Override
		public void init(EaggCostModel model) {
			this.model = model;
		}

		/**
		 * Splits {@code parent} along {@code axis} at {@code split}, sending
		 * samples that fall exactly on the split to either side with equal
		 * probability.
		 */
		protected PartitionNode[] splitPartition(PartitionNode parent,int axis,float split) {
			return splitPartition(parent,axis,split,0.5f);
		}

		/**
		 * Splits {@code parent} along {@code axis} at {@code split}. Samples
		 * strictly below the split go left, strictly above go right; a sample
		 * exactly on the split goes left with probability {@code prob}.
		 */
		protected PartitionNode[] splitPartition(PartitionNode parent,int axis,float split, float prob) {
			Cube2 cube = (Cube2)parent.getPartition();
			cube.setSplit(axis,split,prob);

			ArrayList<ITuple> leftSamples = new ArrayList<ITuple>();
			ArrayList<ITuple> rightSamples = new ArrayList<ITuple>();

			for ( ITuple tuple : parent.getSample() ) {
				final float value = tuple.getFloat(axis);
				final boolean goesLeft;
				if ( value < split ) {
					goesLeft = true;
				} else if ( value > split ) {
					goesLeft = false;
				} else {
					// boundary value: randomize the side according to prob
					goesLeft = Math.random() < prob;
				}
				if ( goesLeft ) {
					leftSamples.add(tuple);
				} else {
					rightSamples.add(tuple);
				}
			}

			leftSamples.trimToSize();
			rightSamples.trimToSize();

			return new PartitionNode[] {
					new PartitionNode(parent,cube.getLeft(),leftSamples),
					new PartitionNode(parent,cube.getRight(),rightSamples) };
		}

		@Override
		public Configuration getConf() {
			return conf;
		}

		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;
			// eps controls the minimum allowed cell width (see MIN_LENGTH_FACTOR)
			eps = conf.getFloat("skewreduce.planner.partitioner.cube2.eps",0.0f);
		}
	}

	static class Cube2UniformPartitioner extends Cube2Partitioner {

		/**
		 * Per-axis balance statistic: counts how many samples fall on each
		 * side of the axis midpoint. A count near zero means the midpoint
		 * splits the samples almost evenly.
		 */
		static class Axis implements Comparable<Axis> {
			final int index;
			final float mv;   // minimum coordinate along this axis
			final float Mv;   // maximum coordinate along this axis
			final float midv; // midpoint; the candidate split value
			int count;        // (#samples >= midv) - (#samples < midv)

			Axis(int idx,ITuple m,ITuple M) {
				index = idx;
				mv = m.getFloat(index);
				Mv = M.getFloat(index);
				midv = (mv+Mv)*0.5f;
			}

			public int getIndex() { return index; }
			public float getLength() { return Mv - mv; }
			public int getCount() { return count; }
			public float getSplit() { return midv; }

			public void accumulate(ITuple t) {
				final float v = t.getFloat(index);
				count += (( v < midv ) ? -1 : 1);
			}

			@Override
			public int compareTo(Axis o) {
				// ascending by |count|; Integer.compare replaces the
				// overflow-prone subtraction idiom |count| - |o.count|
				return Integer.compare(Math.abs(count), Math.abs(o.count));
			}
		}

		/**
		 * Splits at the midpoint of the axis whose midpoint most evenly
		 * divides the samples (smallest |count|).
		 *
		 * @return left/right child nodes, or null when every axis is already
		 *         too short to split further
		 */
		@Override
		public PartitionNode[] partition(PartitionNode root) {
			Cube2 p = (Cube2)root.getPartition();
			Point3D mp = p.getMinPoint();
			Point3D Mp = p.getMaxPoint();

			// candidate axes: long enough that both halves can keep the
			// minimum width of MIN_LENGTH_FACTOR*eps
			List<Axis> axes = new ArrayList<Axis>(3);
			for ( int i = 0; i < 3; ++i ) {
				Axis a = new Axis(i,mp,Mp);
				if ( a.getLength() > 2*MIN_LENGTH_FACTOR*eps ) {
					axes.add(a);
				}
			}

			if ( axes.isEmpty() ) return null; // can't split more

			for ( ITuple t : root.getSample() ) {
				for ( Axis a : axes )
					a.accumulate(t);
			}

			Collections.sort(axes);

			if ( LOG.isDebugEnabled() ) {
				for ( Axis a : axes ) {
					LOG.debug("axis {}: count = {}",a.getIndex(),Math.abs(a.getCount()));
				}
			}

			Axis axis = axes.get(0);
			return splitPartition(root,axis.getIndex(),axis.getSplit());
		}
	}
	
	static class Cube2MedianPartitioner extends Cube2Partitioner {
		/**
		 * Per-axis spread statistic: accumulates sum and sum-of-squares
		 * (via Kahan summation) to compute the sample variance of the axis.
		 */
		class Axis implements Comparable<Axis> {
			final int index;
			double var;        // sample variance, valid after doFinal()
			final float mv;    // minimum coordinate along this axis
			final float Mv;    // maximum coordinate along this axis
			final KahanSum s2 = new KahanSum(); // sum of squares
			final KahanSum s = new KahanSum();  // sum of values

			Axis(int idx,ITuple m,ITuple M) {
				index = idx;
				mv = m.getFloat(index);
				Mv = M.getFloat(index);
			}

			public int getIndex() { return index; }
			public float getLength() { return Mv - mv; }

			public void accumulate(ITuple t) {
				final float v = t.getFloat(index);
				s2.add(v*v);
				s.add(v);
			}

			/** Finalizes the variance as E[v^2] - E[v]^2 over n samples. */
			public void doFinal(int n) {
				double u = s.value()/n;
				var = (s2.value()/n) - u*u;
			}

			@Override
			public int compareTo(Axis o) {
				// descending by variance. Double.compare replaces
				// (int)Math.signum(o.var - var): same order for finite values
				// but well-defined for NaN, so sorting cannot violate the
				// Comparator contract.
				return Double.compare(o.var, var);
			}

			/**
			 * Returns the offset to add to split value {@code v} so that both
			 * halves stay at least MIN_LENGTH_FACTOR*eps wide, or 0 when no
			 * adjustment is needed.
			 */
			public float adjustSplit(float v) {
				final float limit = MIN_LENGTH_FACTOR * eps;
				if ( (v - mv)  < limit ) {
					// should adjust minimum
					return (mv + limit) - v;

				} else if ( (Mv - v) < limit ) {
					// should adjust maximum
					return (Mv - limit) - v;
				}
				return 0.0f; // nothing to adjust
			}
		}

		/**
		 * Splits at the sample median along the axis with the largest
		 * variance, nudging the split inward when the median lies too close
		 * to a boundary.
		 *
		 * @return left/right child nodes, or null when every axis is already
		 *         too short to split further
		 */
		@Override
		public PartitionNode[] partition(PartitionNode root) {
			// pick median along the most sparse axis (i.e., max variance)
			Cube2 p = (Cube2)root.getPartition();
			Point3D mp = p.getMinPoint();
			Point3D Mp = p.getMaxPoint();

			// candidate axes: long enough that both halves can keep the
			// minimum width of MIN_LENGTH_FACTOR*eps
			List<Axis> axes = new ArrayList<Axis>(3);
			for ( int i = 0; i < 3; ++i ) {
				Axis a = new Axis(i,mp,Mp);
				if ( a.getLength() > 2*MIN_LENGTH_FACTOR*eps ) {
					axes.add(a);
				}
			}

			if ( axes.isEmpty() ) return null; // can't split more

			List<ITuple> samples = root.getSample();
			int n = samples.size();

			for ( ITuple t : samples ) {
				for ( Axis a : axes )
					a.accumulate(t);
			}

			for ( Axis a : axes ) {
				a.doFinal(n);
				if ( LOG.isDebugEnabled() ) {
					LOG.debug("Axis {}: variance = {}",a.index,a.var);
				}
			}

			// pick the axis with the maximum variance (sort is descending)
			Collections.sort(axes);

			Axis axis = axes.get(0);
			int med = n >> 1;

			// partial sort: places the median at index med and partitions
			// samples around it along the chosen axis
			Util.findKMedian(samples, med+1, 0, n-1, axis.getIndex());
			float split = samples.get(med).getFloat(axis.getIndex());
			float adjust = axis.adjustSplit(split);
			if ( adjust != 0 ) {
				if ( LOG.isDebugEnabled() )
					LOG.debug("adjust split value from {} to {}",split,split+adjust);
				split += adjust;
				// now repartition the samples according to new split value;
				// only the side the split moved into needs repartitioning
				int rc;
				if ( adjust < 0 ) {
					rc = Util.partition(samples,0,med,split,axis.getIndex());
				} else {
					rc = Util.partition(samples, med+1, n-1, split, axis.getIndex());
				}
				if ( rc < 0 ) {
					LOG.error("Failed to partition?!");
				} else {
					med = rc;
				}
			}

			List<ITuple> ls = samples.subList(0, med);
			List<ITuple> rs = samples.subList(med, n);

			p.setSplit(axis.getIndex(), split);

			return new PartitionNode[] { new PartitionNode(root,p.getLeft(),ls), new PartitionNode(root,p.getRight(),rs) };
		}
	}
	
	
	static class Cube2HierarchicalUniformPartitioner extends Cube2Partitioner {

		/**
		 * Splits at the spatial midpoint of the axis chosen round-robin by
		 * tree level (x, y, z, x, ...). Returns null once the chosen axis is
		 * too short to keep both halves at least MIN_LENGTH_FACTOR*eps wide.
		 */
		@Override
		public PartitionNode[] partition(PartitionNode root) {
			Cube2 cube = (Cube2)root.getPartition();
			Point3D lo = cube.getMinPoint();
			Point3D hi = cube.getMaxPoint();

			final int axis = cube.getLevel() % 3;
			final float length = hi.getFloat(axis) - lo.getFloat(axis);

			if ( length <= 2*MIN_LENGTH_FACTOR*eps ) {
				return null; // too small to split further
			}

			float mid = ( lo.getFloat(axis) + hi.getFloat(axis) ) * 0.5f;
			return splitPartition(root,axis,mid);
		}
	}
	
	
	/**
	 * Splits at the sample median along the axis chosen round-robin by tree
	 * level (x, y, z, x, ...), nudging the split inward when the median lies
	 * too close to a boundary. Returns null once the chosen axis is too short
	 * to keep both halves at least MIN_LENGTH_FACTOR*eps wide.
	 */
	static class Cube2HierarchicalMedianPartitioner extends Cube2Partitioner {		
		@Override
		public PartitionNode[] partition(PartitionNode root) {
			// pick median along the most sparse axis (i.e., max variance)
			Cube2 p = (Cube2)root.getPartition();
			Point3D mp = p.getMinPoint();
			Point3D Mp = p.getMaxPoint();
			
			List<ITuple> samples = root.getSample();
			int n = samples.size();
			int med = n >> 1;
			// axis is fixed by the partition's depth, not by the data
			int axis = p.getLevel() % 3;
			float mv = mp.getFloat(axis);
			float Mv = Mp.getFloat(axis);
			
			if ( (Mv - mv) <= 2*MIN_LENGTH_FACTOR*eps ) {
				return null;
			}
				
			// partial sort: places the median at index med and partitions
			// samples around it along the chosen axis
			// NOTE(review): assumes n > 0; with an empty sample list the
			// indices below go out of range -- confirm callers guarantee this.
			Util.findKMedian(samples, med+1, 0, n-1, axis);
			float split = samples.get(med).getFloat(axis);
			float adjust = 0.0f;
			{
				// keep both halves at least MIN_LENGTH_FACTOR*eps wide
				final float limit = MIN_LENGTH_FACTOR * eps;
				if ( (split - mv)  < limit ) {
					// should adjust minimum
					adjust = (mv + limit) - split;
					
				} else if ( (Mv - split) < limit ) {
					// should adjust maximum
					adjust = (Mv - limit) - split;
				}
			}			
			
			if ( adjust != 0 ) {
				if ( LOG.isDebugEnabled() )
					LOG.debug("adjust split value from {} to {}",split,split+adjust);
				split += adjust;
				// now repartition the samples according to new split value;
				// only the side the split moved into needs repartitioning
				int rc;
				if ( adjust < 0 ) {
					rc = Util.partition(samples,0,med,split,axis);
				} else {
					rc = Util.partition(samples, med+1, n-1, split, axis);
				}
				if ( rc < 0 ) {
					LOG.error("Failed to partition?!");
				} else {
					med = rc;
				}
			}
			
			// views over the (re)partitioned sample list -- no copying
			List<ITuple> ls = samples.subList(0, med);
			List<ITuple> rs = samples.subList(med, n);
			
			p.setSplit(axis, split);

			return new PartitionNode[] { new PartitionNode(root,p.getLeft(),ls), new PartitionNode(root,p.getRight(),rs) };
		}
	}

	static class Cube2PeakPartitioner extends Cube2Partitioner {
		/**
		 * Per-axis histogram of sample coordinates; axes are ranked by the
		 * frequency of their most crowded (peak) bucket.
		 */
		class Axis implements Comparable<Axis> {
			final int index;
			final float mv;  // minimum coordinate along this axis
			final float Mv;  // maximum coordinate along this axis
			final Histogram h;
			Histogram.Bucket peak; // most frequent bucket, valid after build()

			Axis(int idx,ITuple m,ITuple M) {
				index = idx;
				mv = m.getFloat(index);
				Mv = M.getFloat(index);
				h = new Histogram(mv, Mv, eps);
			}

			public int getIndex() { return index; }
			public float getLength() { return Mv - mv; }
			public Histogram getHistogram() { return h; }
			public Histogram.Bucket getPeakBucket() { return peak; }
			public float getSplit() { return (float)peak.getMid(); }

			public void build(List<ITuple> data) {
				h.build(data, index);
				int b = h.getMostFrequentBucket();
				peak = h.getBucket(b);
			}

			@Override
			public int compareTo(Axis o) {
				// descending by peak frequency; Integer.compare replaces the
				// overflow-prone subtraction idiom
				return Integer.compare(o.peak.getFrequency(), peak.getFrequency());
			}
		}

		/**
		 * Splits along the axis with the highest histogram peak, at the
		 * median of the sample values falling inside that peak bucket.
		 *
		 * @return left/right child nodes, or null when every axis is already
		 *         too short to split further
		 */
		@Override
		public PartitionNode[] partition(PartitionNode root) {
			Cube2 p = (Cube2)root.getPartition();
			Point3D mp = p.getMinPoint();
			Point3D Mp = p.getMaxPoint();

			// candidate axes: long enough that both halves can keep the
			// minimum width of MIN_LENGTH_FACTOR*eps
			List<Axis> axes = new ArrayList<Axis>(3);
			for ( int i = 0; i < 3; ++i ) {
				Axis a = new Axis(i,mp,Mp);
				if ( a.getLength() > 2*MIN_LENGTH_FACTOR*eps ) {
					axes.add(a);
				}
			}

			if ( axes.isEmpty() ) return null; // can't split more

			List<ITuple> samples = root.getSample();

			for ( Axis a : axes ) {
				a.build(samples);
			}

			for ( Axis a : axes ) {
				if ( LOG.isDebugEnabled() ) {
					LOG.debug("Axis {}: peak bucket = {}",a.index,a.getPeakBucket());
				}
			}

			Collections.sort(axes);

			Axis axis = axes.get(0);
			Histogram.Bucket peakBucket = axis.getPeakBucket();
			// pick the value that equally splits the samples falling into
			// this bucket
			// NOTE(review): assumes the peak bucket is non-empty; an empty
			// sample set would make i == 0 and the median lookup below fail.
			float[] vals = new float[peakBucket.getFrequency()];
			int i = 0;
			for ( ITuple t : samples ) {
				float v = t.getFloat(axis.getIndex());
				if ( peakBucket.contains(v) ) {
					vals[i++] = v;
				}
			}
			Arrays.sort(vals,0,i);
			float split;
			if ( (i & 0x01) > 0 ) {
				// odd count: exact middle element
				split = vals[i>>1];
			} else {
				// even count: mean of the two middle elements
				split = ( vals[i>>1] + vals[(i>>1)-1] ) * 0.5f;
			}
			return splitPartition(root,axis.getIndex(),split);
		}
	}
	
	static class Cube2HierarchicalWorkloadPartitioner extends Cube2WorkloadPartitioner {
		/**
		 * Prefers the axis dictated by the partition level (round-robin over
		 * the three dimensions); falls back to the workload-based choice when
		 * that axis is not among the candidates.
		 */
		@Override
		protected Axis chooseAxis(Cube2 c,List<Axis> axes,List<ITuple> sample) {
			final int preferred = c.getLevel() % 3;
			for ( Axis candidate : axes ) {
				if ( candidate.getIndex() == preferred ) {
					return candidate;
				}
			}
			return super.chooseAxis(c,axes,sample);
		}
	}
	
	static class Cube2WorkloadPartitioner extends Cube2Partitioner {
		CostEstimator leftCost;   // estimated cost of the growing left half
		CostEstimator rightCost;  // estimated cost of the growing right half

		/**
		 * Per-axis histogram of sample coordinates; axes are ranked by the
		 * frequency of their most crowded (peak) bucket.
		 */
		protected class Axis implements Comparable<Axis> {
			final int index;
			final float mv;  // minimum coordinate along this axis
			final float Mv;  // maximum coordinate along this axis
			final Histogram h;
			Histogram.Bucket peak; // most frequent bucket, valid after build()

			Axis(int idx,ITuple m,ITuple M) {
				index = idx;
				mv = m.getFloat(index);
				Mv = M.getFloat(index);
				h = new Histogram(mv, Mv, eps);
			}

			public int getIndex() { return index; }
			public float getLength() { return Mv - mv; }
			public Histogram getHistogram() { return h; }
			public Histogram.Bucket getPeakBucket() { return peak; }
			public float getSplit() { return (float)peak.getMid(); }

			public void build(List<ITuple> data) {
				h.build(data, index);
				int b = h.getMostFrequentBucket();
				peak = h.getBucket(b);
			}

			@Override
			public int compareTo(Axis o) {
				// descending by peak frequency; Integer.compare replaces the
				// overflow-prone subtraction idiom
				return Integer.compare(o.peak.getFrequency(), peak.getFrequency());
			}
		}

		@Override
		public void init(EaggCostModel model) {
			// keep the base-class contract: the override previously left the
			// inherited 'model' field null
			this.model = model;
			leftCost = model.createWorkCostEstimator();
			rightCost = model.createWorkCostEstimator();
		}

		/**
		 * Builds the per-axis histograms and returns the axis with the
		 * highest peak frequency. Subclasses may override to impose a
		 * different preference.
		 */
		protected Axis chooseAxis(Cube2 p,List<Axis> axes,List<ITuple> sample) {
			for ( Axis a : axes ) {
				a.build(sample);
			}

			for ( Axis a : axes ) {
				if ( LOG.isDebugEnabled() ) {
					LOG.debug("Axis {}: peak bucket = {}",a.index,a.getPeakBucket());
				}
			}

			Collections.sort(axes);

			return axes.get(0);	
		}

		/**
		 * Splits the partition so the estimated workload of the two halves is
		 * balanced: sorts the samples along the chosen axis, then greedily
		 * grows the cheaper side inward from both ends. When both cursors
		 * stop on the same coordinate value, the remaining duplicates are
		 * distributed and the split becomes probabilistic at that value.
		 *
		 * @return left/right child nodes, or null when every axis is already
		 *         too short to split further
		 */
		@Override
		public PartitionNode[] partition(PartitionNode root) {
			Cube2 p = (Cube2)root.getPartition();
			Point3D mp = p.getMinPoint();
			Point3D Mp = p.getMaxPoint();

			// candidate axes: long enough that both halves can keep the
			// minimum width of MIN_LENGTH_FACTOR*eps
			List<Axis> axes = new ArrayList<Axis>(3);
			for ( int i = 0; i < 3; ++i ) {
				Axis a = new Axis(i,mp,Mp);
				if ( a.getLength() > 2*MIN_LENGTH_FACTOR*eps ) {
					axes.add(a);
				}
			}

			if ( axes.isEmpty() ) return null; // can't split more

			List<ITuple> samples = root.getSample();
			Axis axis = chooseAxis(p,axes,samples);
			final int field = axis.getIndex();

			// we are going to split along the dimension with the highest
			// peak; sort the samples along that axis. Float.compare replaces
			// (int)Math.signum(o1-o2): identical ordering for ordinary
			// values, but well-defined for NaN so the sort cannot throw
			// "Comparison method violates its general contract".
			Collections.sort(samples,new Comparator<ITuple>() {
				@Override
				public int compare(ITuple o1, ITuple o2) {
					return Float.compare(o1.getFloat(field), o2.getFloat(field));
				}});

			int l = -1;
			int r = samples.size();

			if ( LOG.isDebugEnabled() ) {
				LOG.debug("# of samples = {}",samples.size());
			}

			leftCost.init(p,0);
			rightCost.init(p,0);

			// NOTE(review): assumes at least two samples; with fewer, the
			// cursors can cross and index out of range -- confirm callers
			// guarantee this.
			leftCost.update(samples.get(++l));
			rightCost.update(samples.get(--r));

			float lv = samples.get(l).getFloat(field); // value at the left cursor
			float rv = samples.get(r).getFloat(field); // value at the right cursor
			int numLv = 1; // #samples with value lv currently assigned left
			int numRv = 1; // #samples with value rv currently assigned right

			double lCost = leftCost.getCost();
			double rCost = rightCost.getCost();

			// grow the cheaper side one sample at a time until the cursors
			// meet or stop on the same coordinate value
			while ( lv < rv && l+1 != r ) {
				boolean incrLeft;
				if ( lCost > rCost ) {
					incrLeft = false;
				} else if ( rCost > lCost ) {
					incrLeft = true;
				} else {
					incrLeft = Math.random() < 0.5; // tie: pick a side at random
				}

				if ( incrLeft ) {
					leftCost.update(samples.get(++l));
					lCost = leftCost.getCost();
					float v = samples.get(l).getFloat(field);
					if ( v != lv ) {
						numLv = 0;
						lv = v;
					}
					++numLv;
				} else {
					rightCost.update(samples.get(--r));
					rCost = rightCost.getCost();
					float v = samples.get(r).getFloat(field);
					if ( v != rv ) {
						numRv = 0;
						rv = v;
					}
					++numRv;
				}
			}

			if ( LOG.isDebugEnabled() ) {
				LOG.debug("l={} (value={}, num={}, cost={}); r={} (value={}, num={}, cost={})",new Object[] {l,lv,numLv,lCost,r,rv,numRv,rCost});
			}

			// now how to split the mid point?
			float split;
			float prob;
			if ( lv == rv ) {
				// both cursors stopped on the same coordinate value: evenly
				// distribute common values until the two costs are even, then
				// split probabilistically at that value
				split = lv;
				while ( l+1 != r ) {
					boolean incrLeft;
					if ( lCost == rCost ) {
						incrLeft = Math.random() < 0.5;
					} else {
						incrLeft = lCost < rCost;
					}

					if ( incrLeft ) {
						leftCost.update(samples.get(++l));
						lCost = leftCost.getCost();
						++numLv;
					} else {
						rightCost.update(samples.get(--r));
						rCost = rightCost.getCost();
						++numRv;
					}
				}

				// probability that a boundary sample is sent left
				prob = numLv / (float)(numLv + numRv);

				if ( LOG.isDebugEnabled() ) {
					LOG.debug("Redistribute: l={} (value={}, num={}, cost={}); r={} (value={}, num={}, cost={}). left prob = {}",new Object[] {l,lv,numLv,lCost,r,rv,numRv,rCost,prob});
				}

			} else {
				// distinct boundary values: take the mid value between them
				split = ( lv + rv ) * 0.5f;
				if ( LOG.isDebugEnabled() ) {
					LOG.debug("split = {}",split);
				}
				prob = 0.5f;
			}

			return splitPartition(root,axis.getIndex(),split,prob);
		}
	}
	
	/**
	 * Instantiates the set of active Cube2 partitioners via Hadoop
	 * reflection, initializes each with the given cost model, and returns
	 * them.
	 */
	public static Partitioner[] getPartitioner(Configuration conf, EaggCostModel model) {
		List<Partitioner> active = new ArrayList<Partitioner>(3);
		active.add(ReflectionUtils.newInstance(Cube2HierarchicalMedianPartitioner.class, conf));
		active.add(ReflectionUtils.newInstance(Cube2WorkloadPartitioner.class, conf));
		active.add(ReflectionUtils.newInstance(Cube2HierarchicalWorkloadPartitioner.class, conf));
		for ( Partitioner partitioner : active ) {
			partitioner.init(model);
		}
		return active.toArray(new Partitioner[active.size()]);
	}
}
