package skewreduce.dfof;

import java.io.DataInput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import skewreduce.framework.planner.CostEstimator;
import skewreduce.framework.planner.EaggCostModel;
import skewreduce.framework.planner.axis.Axis;
import skewreduce.lib.Cube2;
import skewreduce.lib.Histogram;
import skewreduce.lib.ITuple;
import skewreduce.lib.IntSparseArray;
import skewreduce.lib.Partition;
import skewreduce.lib.PhysicsParticle;
import skewreduce.lib.Point3D;
import skewreduce.lib.SparseArrayRef;

import gnu.trove.TLongIntHashMap;

public final class dFoFCostModel extends EaggCostModel {
	private static final Logger LOG = LoggerFactory.getLogger("skewreduce.CostModel");
	
	// Root partition (bounding cube) of the whole data set; built in init()
	// from "dfof.costmodel.root", or a unit cube when unset.
	Cube2 rootPartition;
	
	// Estimator variant selectors, read from "dfof.costmodel.work" / "dfof.costmodel.merge".
	int costWorkMode;
	int costMergeMode;
	// Friends-of-friends linking length ("dfof.params.eps"); also used as histogram bin width.
	float eps;
	// Histogram bucket count ("dfof.costmodel.histogram.buckets").
	int buckets;
	
	// true: tuples are Point3D records; false: full PhysicsParticle records (see createTuple).
	boolean isDataPoint;
	
	// Estimator instance reused by costWork(); created per costWorkMode in init().
	CostEstimator workEstimator;
	
	/**
	 * Collects the tuples from s[src,to) that lie within eps of the merge
	 * surface of partition p; only those tuples participate in a merge step.
	 */
	private List<ITuple> getMergeInput(Partition p,List<? extends ITuple> s,int src,int to) {
		final Cube2 boundary = (Cube2)p;
		final List<ITuple> surfaceTuples = new ArrayList<ITuple>();
		for ( ITuple tuple : s.subList(src, to) ) {
			if ( !boundary.atMergeSurface(tuple, eps) ) {
				continue;
			}
			surfaceTuples.add(tuple);
		}
		return surfaceTuples;
	}
	
	
	/**
	 * Simplest work-cost estimator: the cost is just the number of records
	 * seen since init(), scaled by 1/sigma.
	 */
	static class CountCost extends CostEstimator {
		int n;	// records observed since the last init()
		
		@Override
		public void init(Partition p,int sz) {
			n = 0;
		}
		@Override
		public void update(ITuple t) {
			n += 1;
		}
		@Override
		public void update(List<? extends ITuple> data) {
			n += data.size();
		}
		@Override
		public double getCost() {
			return n / sigma;
		}
	}
	
	/**
	 * Work-cost estimator keeping one 1-D histogram per axis. Adding the k-th
	 * point to a bucket contributes 2k-1, so each per-axis cost accumulates
	 * the sum of squared bucket counts; getCost() reports the worst axis
	 * scaled by 1/sigma^2.
	 */
	static class Histogram1DCost extends CostEstimator {
		Cube2 cube;
		int nBuckets;	// buckets per axis, from "dfof.costmodel.histogram.buckets"
		
		Histogram[] h = new Histogram[3];
		long[] cost = new long[3];
		
		@Override
		public void setConf(Configuration conf) {
			super.setConf(conf);
			nBuckets = conf.getInt("dfof.costmodel.histogram.buckets",10);
		}
		
		@Override
		public void init(Partition p,int sz) {
			cube = (Cube2)p;
			Point3D lo = cube.getMinPoint();
			Point3D hi = cube.getMaxPoint();
			for ( int axis = 0; axis < 3; ++axis ) {
				h[axis] = new Histogram(lo.getFloat(axis),hi.getFloat(axis),nBuckets);
				cost[axis] = 0;
			}
		}
		
		@Override
		public void update(ITuple t) {
			for ( int axis = 0; axis < 3; ++axis ) {
				int bucket = h[axis].add(t, axis);
				// bucket count is post-increment, so this adds 2k-1 for the k-th point
				long occupancy = h[axis].getBucketValue(bucket);
				cost[axis] += (occupancy - 1) * 2 + 1;
			}
		}

		@Override
		public double getCost() {
			long worst = Math.max(cost[0], Math.max(cost[1],cost[2]));
			return worst / (sigma*sigma);
		}
	}

	/**
	 * 3-D histogram work-cost estimator: tuples are binned into eps-sized
	 * cells keyed by a {@link Group}; the k-th tuple of a cell contributes
	 * 2(k-1)+1, so {@code cost} accumulates the sum of squared cell
	 * occupancies — an estimate of the pairwise-comparison work per cell.
	 * Fixed for consistency with the sibling estimators: the overriding
	 * methods now carry @Override.
	 */
	static class Histogram3DCost extends CostEstimator {
		HashMap<Group,Group> group = new HashMap<Group,Group>();
		Cube2 cube;
		float eps;	// cell width; subclasses may rescale it in setConf()
		float mx;	// minimum corner of the current partition
		float my;
		float mz;
		long cost;	// running sum of squared cell occupancies
		
		@Override
		public void setConf(Configuration conf) {
			super.setConf(conf);
			eps = conf.getFloat("dfof.params.eps",0.00026042f);
		}
		
		@Override
		public void init(Partition p,int sz) {
			cube = (Cube2)p;
			// presize generously (~1.8x the expected entry count) to avoid rehashing
			group = new HashMap<Group,Group>((int)(sz*1.8));
			mx = cube.getMinPoint().get(0);
			my = cube.getMinPoint().get(1);
			mz = cube.getMinPoint().get(2);
			cost = 0;
		}
		
		@Override
		public void update(ITuple t) {
			int binX = (int)((t.getFloat(0) - mx) / eps);
			int binY = (int)((t.getFloat(1) - my) / eps);
			int binZ = (int)((t.getFloat(2) - mz) / eps);
			
			Group grp = new Group(binX,binY,binZ);
			Group grp2 = group.get(grp);
			if ( grp2 == null ) {
				// the Group doubles as key and value so it can carry the counter
				group.put(grp, grp);
				grp2 = grp;
			}
			cost += ((grp2.getCount() << 1) + 1);
			grp2.incr();
		}

		@Override
		public double getCost() {
			return cost / (sigma*sigma);
		}
	}

	/**
	 * 3-D histogram work-cost estimator backed by a Trove long-&gt;int map.
	 * Each tuple is binned into an eps-sized cell whose (x,y,z) bin indices
	 * are packed into a single long key (21 bits each); the k-th tuple of a
	 * cell contributes 2(k-1)+1, so {@code cost} accumulates the sum of
	 * squared cell occupancies.
	 */
	static class Histogram3DCostTrove extends CostEstimator {
		TLongIntHashMap group;	// packed cell key -> occupancy count
		Cube2 cube;
		float eps;	// cell width, from "dfof.params.eps"
		float mx;	// minimum corner of the current partition
		float my;
		float mz;
		long cost;	// running sum of squared cell occupancies
		
		@Override
		public void setConf(Configuration conf) {
			super.setConf(conf);
	        eps = conf.getFloat("dfof.params.eps",0.00026042f);
			
			if ( LOG.isInfoEnabled() ) {
				LOG.info("set eps = {}, sigma = {}",eps, sigma);
			}
		}
		
		@Override
		public void init(Partition p,int sz) {
			cube = (Cube2)p;
			group = new TLongIntHashMap(sz * 2);
			mx = cube.getMinPoint().get(0);
			my = cube.getMinPoint().get(1);
			mz = cube.getMinPoint().get(2);
			cost = 0;
		}
		
		@Override
		public void update(ITuple t) {
			int binX = (int)((t.getFloat(0) - mx) / eps);
			int binY = (int)((t.getFloat(1) - my) / eps);
			int binZ = (int)((t.getFloat(2) - mz) / eps);
			
			// each bin index must fit in 21 bits to pack three into one long
			if ( binX > 0x1fffff || binY > 0x1fffff || binZ > 0x1fffff )
				throw new IllegalStateException(
						"bin index exceeds 21 bits: "+binX+"/"+binY+"/"+binZ);
			
			// BUG FIX: operands must be widened to long BEFORE shifting. An int
			// shift count is taken mod 32, so the old "binX << 42" actually
			// computed "binX << 10" and keys from distinct cells collided.
			long grpKey = ((long)binX << 42) | ((long)binY << 21) | (long)binZ;
			// Trove's get() returns 0 for an absent key, so the k-th insertion
			// into a cell adds 2(k-1)+1 to the cost.
			cost += ((group.get(grpKey) << 1) + 1);
			group.adjustOrPutValue(grpKey, 1, 1);
		}

		@Override
		public double getCost() {
			return cost / (sigma*sigma);
		}
		
		@Override
		public void free() {
			// drop the map so the (potentially large) table can be collected
			group = null;
		}
	}

	/**
	 * 3-D histogram work-cost estimator backed by nested sparse arrays
	 * ([x][y][z] -> count) instead of a hash map. Cost accumulation is the
	 * same 2k-1 scheme as the other 3-D estimators.
	 */
	static class Histogram3DCostX extends CostEstimator {
		ArrayList<SparseArrayRef<IntSparseArray>> group;	// [x] -> [y] -> [z] -> count
		Cube2 cube;
		float eps;	// cell width, from "dfof.params.eps"
		float mx;	// minimum corner of the current partition
		float my;
		float mz;
		
		int bucketX;	// number of eps-wide cells along each axis
		int bucketY;
		int bucketZ;
		long cost;	// running sum of squared cell occupancies
		
		@Override
		public void setConf(Configuration conf) {
			super.setConf(conf);
	        eps = conf.getFloat("dfof.params.eps",0.00026042f);
		}
		
		@Override
		public void init(Partition p,int sz) {
			cube = (Cube2)p;			
			mx = cube.getMinPoint().get(0);
			my = cube.getMinPoint().get(1);
			mz = cube.getMinPoint().get(2);
			
			bucketX = (int)(cube.getLength(0).floatValue()/eps)+1;
			bucketY = (int)(cube.getLength(1).floatValue()/eps)+1;
			bucketZ = (int)(cube.getLength(2).floatValue()/eps)+1;
			
			// BUG FIX: new ArrayList<>(bucketX) only reserves capacity — the list
			// stayed empty, so group.get(x) in check() threw
			// IndexOutOfBoundsException on the very first update(). The slots
			// must actually exist; fill them with nulls up front.
			group = new ArrayList<SparseArrayRef<IntSparseArray>>(bucketX);
			for ( int i = 0; i < bucketX; ++i ) {
				group.add(null);
			}
			
			cost = 0;
		}
		
		/** Returns the z-array for cell column (x,y), creating levels lazily. (z unused.) */
		private IntSparseArray check(int x,int y,int z) {
			SparseArrayRef<IntSparseArray> arrX = group.get(x);
			if ( arrX == null ) {
				arrX = new SparseArrayRef<IntSparseArray>(bucketY);
				group.set(x,arrX);
			}
			IntSparseArray arrZ = arrX.get(y);
			if ( arrZ == null ) {
				arrZ = new IntSparseArray(bucketZ);
				arrX.set(y, arrZ);
			}
			return arrZ;
		}
		
		/** Post-increments and returns the previous occupancy of cell (x,y,z). */
		private int getAndIncr(int x,int y,int z) {
			IntSparseArray arr = check(x,y,z);
			int v = arr.get(z);
			arr.set(z, v+1);
			return v;
		}
		
		@Override
		public void update(ITuple t) {
			int binX = (int)((t.getFloat(0) - mx) / eps);
			int binY = (int)((t.getFloat(1) - my) / eps);
			int binZ = (int)((t.getFloat(2) - mz) / eps);
			cost += ((getAndIncr(binX,binY,binZ) << 1) + 1);
		}

		@Override
		public double getCost() {
			return cost / (sigma*sigma);
		}
	}

	/** Variant of Histogram3DCost that bins at twice the linking length. */
	static class Histogram3DCost2 extends Histogram3DCost {
		@Override
		public void setConf(Configuration conf) {
			super.setConf(conf);
			// overwrite the eps set by the superclass with a doubled bin width
			eps = 2.0f * conf.getFloat("dfof.params.eps",0.00026042f);
		}
	}
	
	
	
	/**
	 * Estimates the cost of merging partitions p1 and p2 inside p. Only the
	 * tuples near the merge surface (see getMergeInput) participate. The
	 * estimate depends on costMergeMode: 0 = n log n in the surface tuple
	 * count, 1 = per-axis histogram over the merging area, 2 = 3-D eps-cell
	 * binning, 3 = same as 2 with doubled bin width.
	 */
	@Override
	public double costMerge(Partition p,Partition p1, List<? extends ITuple> s1,int src1,int to1,
			Partition p2, List<? extends ITuple> s2,int src2,int to2) {		
		List<ITuple> i1 = getMergeInput(p,s1,src1,to1);
		List<ITuple> i2 = getMergeInput(p,s2,src2,to2);
		double cost = 0.0;
		
		if ( LOG.isDebugEnabled() ) {
			LOG.debug("Total input = {} / {}",s1.size(),s2.size());
			LOG.debug("Merge input = {} / {}",i1.size(),i2.size());
		}
		
		float binWidth = eps;
		
		switch ( costMergeMode ) {
			case 0:
				// based on number of particles
				cost = Complexity.NLOGN.expectedCost(sigma, i1.size()+i2.size(), i1.size() + i2.size());
				break;
			case 1:
				// based on a per-axis histogram restricted to the merging area
				{
					Partition mp = p.getMergingArea(p1,p2,eps);
					if ( LOG.isDebugEnabled() ) {
						LOG.debug("merging area = {}",mp);
					}
					Histogram1DCost costEst = new Histogram1DCost();
					costEst.setConf(conf);
					costEst.init(mp,i1.size()+i2.size());
					costEst.update(i1);
					costEst.update(i2);
					
					cost = costEst.getCost();
				}
				break;
			case 3:
				binWidth *= 2.0f;
				// fall through: mode 3 is mode 2 with a doubled bin width
			case 2:
				{
					// bin both inputs into binWidth-sized cells anchored at p1's
					// minimum corner; the map is shared so tuples from both sides
					// falling into the same cell interact.
					Cube2 cube = (Cube2)p1;
					HashMap<Group,Group> group = new HashMap<Group,Group>();
					long c = binnedCost(group, i1, cube, binWidth)
							+ binnedCost(group, i2, cube, binWidth);
					cost = c / (sigma*sigma);				
				}
				break;
			default:
				throw new IllegalStateException("Invalid cost model for merge: "+costMergeMode);
		}

		
		if ( LOG.isDebugEnabled() ) {
			LOG.debug("final merge cost = {}",cost);
		}
		
		return cost;
	}
	
	/**
	 * Bins every tuple of {@code input} into a binWidth-sized cell grid
	 * (shared via {@code group}, anchored at {@code cube}'s minimum corner)
	 * and returns the incremental quadratic cost: the k-th tuple of a cell
	 * adds 2(k-1)+1, so the total tracks the sum of squared cell occupancies.
	 */
	private long binnedCost(HashMap<Group,Group> group, List<ITuple> input, Cube2 cube, float binWidth) {
		float mx = cube.getMinPoint().get(0);
		float my = cube.getMinPoint().get(1);
		float mz = cube.getMinPoint().get(2);
		long c = 0;
		for ( ITuple t : input ) {
			int binX = (int)((t.getFloat(0) - mx) / binWidth);
			int binY = (int)((t.getFloat(1) - my) / binWidth);
			int binZ = (int)((t.getFloat(2) - mz) / binWidth);
			
			Group grp = new Group(binX,binY,binZ);
			Group grp2 = group.get(grp);
			if ( grp2 == null ) {
				group.put(grp, grp);
				grp2 = grp;
			}
			c += ((grp2.getCount() << 1) + 1);
			grp2.incr();
		}
		return c;
	}
	
	/**
	 * A 3-D integer bin coordinate carrying an occupancy counter. Instances
	 * serve as both key and value in a HashMap when binning tuples into
	 * eps-sized cells; equality and hashing use only the bin coordinates.
	 */
	static class Group {
		final int binX;
		final int binY;
		final int binZ;
		int count;	// tuples binned into this cell so far
		
		Group(int x,int y,int z) { binX = x; binY = y; binZ = z; }
		
		public int getCount() { return count; }
		
		public void incr() { ++count; }
		
		@Override
		public int hashCode() {
			// pack the three bin indices into a single int
			return binX << 20 | binY << 10 | binZ;
		}
		
		@Override
		public boolean equals(Object o) {
			if ( !(o instanceof Group) ) {
				return false;
			}
			Group other = (Group)o;
			return binX == other.binX && binY == other.binY && binZ == other.binZ;
		}
	}

	/**
	 * Estimates the local processing cost of the records s[src,to) with the
	 * configured work estimator and releases the estimator's scratch state.
	 */
	@Override
	public double costWork(Partition p, List<? extends ITuple> s,int src,int to) {
		// subList(src, to) is half-open and holds exactly (to - src) records;
		// the previous size hint of (to - src + 1) over-stated it by one.
		workEstimator.init(p,to - src);
		workEstimator.update(s.subList(src, to));
		// compute the cost once instead of re-deriving it for each log argument
		double cost = workEstimator.getCost();
		if ( LOG.isDebugEnabled() ) {
			LOG.debug("final cost = {} ({}s)",cost,cost * getWork2Sec());
		}
		workEstimator.free();
		return cost;
	}

	/**
	 * Deserializes one tuple from the stream. The leading long is consumed
	 * and discarded; the remainder is read as a Point3D or PhysicsParticle
	 * depending on the configured data type.
	 */
	@Override
	public ITuple createTuple(DataInput in) throws IOException {
		in.readLong();	// skip the leading long
		return isDataPoint ? Point3D.read(in) : PhysicsParticle.read(in);
	}

	/** Returns the root partition (bounding cube) built from configuration in init(). */
	@Override
	public Partition getRootPartition() {
		return rootPartition;
	}
	
	/**
	 * Per-record size in the local-work phase: 20 for a Point3D record,
	 * 40 for a PhysicsParticle record (presumably bytes — confirm units
	 * against the framework's usage).
	 */
	@Override
	public int getWorkRecordSize() {
		if ( isDataPoint ) {
			return 20;
		}
		return 40;
	}
	
	/**
	 * Per-record size in the merge phase: 28 for a Point3D record, 48 for a
	 * PhysicsParticle record — 8 larger than the work-phase sizes
	 * (presumably an extra long per record; confirm against the framework).
	 */
	@Override
	public int getMergeRecordSize() {
		if ( isDataPoint ) {
			return 28;
		}
		return 48;
	}

	/**
	 * Reads the model parameters from the job configuration, builds the root
	 * partition, and instantiates the work-cost estimator.
	 */
	@Override
	protected void init(Configuration conf) {
		costWorkMode = conf.getInt("dfof.costmodel.work",0);
		costMergeMode = conf.getInt("dfof.costmodel.merge",0);
		eps = conf.getFloat("dfof.params.eps",0.00026042f);
		buckets = conf.getInt("dfof.costmodel.histogram.buckets",25);
		isDataPoint = conf.getBoolean("dfof.datatype.point",true);
		
		String spec = conf.get("dfof.costmodel.root");
		// without an explicit root specification, default to the unit cube
		rootPartition = ( spec == null ) ? new Cube2(0,1.0f) : new Cube2(spec);
		
		workEstimator = createWorkCostEstimator();
	}

	/** The merge phase is modeled as O(n log n) in its input size. */
	@Override
	public Complexity getMergeComplexity() {
		return Complexity.NLOGN;
	}

	/** The local-work phase is modeled as O(n log n) in its input size. */
	@Override
	public Complexity getWorkComplexity() {
		return Complexity.NLOGN;
	}

	/**
	 * Instantiates the work-cost estimator selected by costWorkMode
	 * ("dfof.costmodel.work"): 0 = record count, 1 = per-axis histogram,
	 * 2 = 3-D eps-cell histogram (Trove-backed), 3 = 3-D histogram with
	 * doubled bin width.
	 */
	@Override
	public CostEstimator createWorkCostEstimator() {
		CostEstimator e = null;
		switch ( costWorkMode ) {
			case 0: e = ReflectionUtils.newInstance(CountCost.class,conf); break;
			case 1: e = ReflectionUtils.newInstance(Histogram1DCost.class,conf); break;
			case 2: e = ReflectionUtils.newInstance(Histogram3DCostTrove.class,conf); break;
			case 3: e = ReflectionUtils.newInstance(Histogram3DCost2.class,conf); break;
			default:
				// report the offending mode, matching costMerge()'s diagnostic;
				// the old message wrongly suggested an initialization problem
				throw new IllegalStateException("Invalid cost model for work: "+costWorkMode);
		}
		return e;
	}

	/**
	 * Bounds the axis to the partition's extent along the axis's own
	 * dimension and configures its histogram with bin width eps.
	 */
	@Override
	public void initAxis(Partition partition, Axis axis) {
		Cube2 cube = (Cube2)partition;
		int dim = axis.getIndex();
		axis.setBound(cube.getMinPoint().getFloat(dim),cube.getMaxPoint().getFloat(dim));
		axis.setHistogram(eps);
	}
}
