package clustersolution

import ("container/vector";
		"rand";
//		"math";
		"fmt";
		"./abbr";
		"./clusterpoint");

//numClasses is set by MakeSolution and read by update()'s mutation
//step when it assigns a replacement cluster at random.
var numClasses int;

//Tuning constants for the particle-style cluster search.
const (
	MaxInitialVelocity = 0.2;	//bound on |velocity| at init and after a cluster switch
	MaxInitialCertainty = 0.2;	//bound on initial certainty; also the switch threshold in update()
	PullRate = .05;	//chance an eligible point adopts the puller's cluster
	MutationRate = .005;	//chance an eligible point jumps to a random cluster
);

//A dimension of the solution: one point's cluster assignment plus the
//certainty/velocity state that update() evolves over time.
type ClusterDim struct {cluster int; certainty float; velocity float;}
//A candidate clustering: one ClusterDim per point, the number of
//clusters, the shared (read-only) point set, and the cached fitness
//score (recomputed by RecalcFitness / Run).
type ClusterSolution struct {data *[]ClusterDim; numberClusters int; points *vector.Vector; Fitness float;}

//AccelerationRequest is sent to the coordinator by a running solution:
//Mine is the sender's current state; a partner solution (presumably
//chosen by the coordinator — see Run) is delivered back on Result.
type AccelerationRequest struct {
	Mine *ClusterSolution;
	Result chan *ClusterSolution;
}

//Methods on a Solution
//DeepCopy makes s an independent copy of o: the ClusterDim slice is
//duplicated element-by-element so later updates to s cannot mutate o.
//The point set is shared deliberately (points are input data, not state).
func (s *ClusterSolution) DeepCopy(o *ClusterSolution) {
	s.numberClusters = o.numberClusters;
	s.Fitness = o.Fitness;

	//BUG FIX: the original did `data = *o.data`, which threw away the
	//fresh allocation and aliased o's backing array — a shallow copy.
	//copy() duplicates the elements into the new storage.
	data := make([]ClusterDim, len(*o.data));
	copy(data, *o.data);
	s.data = &data;

	s.points = o.points;
}

//ClusterMeans returns, for each cluster, the weighted mean of its
//points' boolean feature vectors: component j is the weight fraction
//of the cluster's points with Val[j] set (in [0,1]).
func (s *ClusterSolution) ClusterMeans() *[]*[]float {
	space := make([]*[]float, s.numberClusters);
	pointCounts := make([]int, s.numberClusters);

	//BUG FIX: allocate every mean vector up front. The original only
	//allocated space[cluster] when a true Val bit was seen, so a cluster
	//with no points (or only all-false points) kept a nil pointer that
	//the normalization loop below dereferenced.
	for k := 0; k < s.numberClusters; k++ {
		mean := make([]float, abbr.StateAbbrLen);
		space[k] = &mean;
	}

	i := 0;

	//Accumulate weighted counts; s.data[i] parallels the points vector.
	for element := range (*s.points).Iter() {
		point := element.(clusterpoint.ClusterPoint);
		cluster := (*s.data)[i].cluster;
		pointCounts[cluster] += point.Weight;

		for j := 0; j < abbr.StateAbbrLen; j++ {
			if (point.Val[j]) {
				(*space[cluster])[j] += float(point.Weight);
			}
		}
		i++;
	}

	//Normalize sums to means; skip empty clusters so we never divide by
	//zero (their mean stays all-zero).
	for i := 0; i < s.numberClusters; i++ {
		if pointCounts[i] == 0 {
			continue
		}
		for j := 0; j < abbr.StateAbbrLen; j++ {
			(*space[i])[j] /= float(pointCounts[i]);
		}
	}

	return &space;
}

//MeanDifference returns the mean absolute per-dimension difference
//between two cluster-mean vectors; for means in [0,1] the result is
//in [0,1], with 0 meaning identical means.
//
//BUG FIX: the original summed *signed* differences, so deviations of
//opposite sign cancelled — two very different means could score near
//zero (or negative), corrupting the nearest-cluster search in
//distance(). Absolute values make this a proper dissimilarity.
func (s *ClusterSolution) MeanDifference(a *[]float, b *[]float) float {
	out := 0.0;
	lenI := len(*a);
	lenF := float(lenI);

	for i := 0; i < lenI; i++ {
		val := (*a)[i] - (*b)[i];
		if val < 0 { val = -val }
		out += val/lenF;
	}
	return out;
}

//distance measures how far this solution's clustering is from o's.
//For each of s's clusters it finds o's most similar cluster mean and
//records that pairing in the returned map (s-cluster -> o-cluster);
//the returned float is the average best-match difference, in [0,1].
//A cluster whose best match differs by >= 1.0 keeps the defaults
//(match 0, contribution 1.0/n).
func (s *ClusterSolution) distance(o *ClusterSolution) (float, map[int] int) {
	myClusters := s.ClusterMeans();
	hisClusters := o.ClusterMeans();
	n := len(*myClusters);
	lenF := float(n);
	mapping := make(map[int] int,n);

	totalDistance := 0.0;

	for i:= 0; i < n; i++ {
		min := 1.0;
		minj := 0;
		for j:= 0; j < n; j++ {
			dif := s.MeanDifference((*myClusters)[i],(*hisClusters)[j]);
			//BUG FIX: the original wrote `dif = min;` here, so min was
			//never lowered — mapping picked the *last* candidate under
			//1.0 and totalDistance was always exactly 1.0.
			if(dif < min) {
				min = dif;
				minj = j;
			}
		}
		mapping[i] = minj;
		totalDistance += min / lenF;
	}

	return totalDistance, mapping;
}

//RecalcFitness recomputes the solution's fitness score and caches it
//in the exported Fitness field.
func (s *ClusterSolution) RecalcFitness() {
	s.Fitness = s.fitness();
}

//fitness scores how crisp the cluster means are. Each mean component
//contributes its doubled distance from the nearest extreme: 0 when the
//component is exactly 0 or 1 (the cluster is unanimous on that
//feature) and a maximum of 1 at 0.5 (maximally mixed). Lower totals
//therefore indicate tighter clusters.
func (s *ClusterSolution) fitness() float {
	means := s.ClusterMeans();
	total := 0.0;
	numClusters := len(*means);
	for c := 0; c < numClusters; c++ {
		row := (*means)[c];
		for d := 0; d < abbr.StateAbbrLen; d++ {
			//We want variance of 0 for 0,1 and 1 for 0.5.
			v := (*row)[d];
			if v >= 0.5 {
				total += 2 - 2*v;
			} else {
				total += 2 * v;
			}
		}
	}
	return total;
}

//update nudges every point's cluster state toward or away from the
//pulling solution's assignment. dist is the solution distance from
//distance(); mapping translates the puller's cluster ids into this
//solution's ids (see the comment in Run). Closer solutions (small
//dist) exert a stronger pull.
func (s *ClusterSolution) update(dist float,
								 pull *ClusterSolution,
								 mapping map[int] int) {
	len := len(*s.data);
	for i:=0; i < len; i++ {
		//The puller's cluster for point i, expressed in local ids.
		pullC := mapping[(*pull.data)[i].cluster];
		//Positive when the puller agrees with our assignment,
		//negative when it disagrees.
		accel := (1.0 - dist);
		if(pullC != (*s.data)[i].cluster) { accel *= -1; }
		
		//Acceleration modifies velocity
		//Velocity is stored squashed into (-1,1) via v = n/(n+1) (mirrored
		//for negatives): unsquash to n, add the acceleration, re-squash,
		//so velocity stays bounded no matter how many updates occur.
		o := (*s.data)[i].velocity;
		n := 0.0;
		if o>0 { n= -o/(o-1) } else { n= o/(o+1);}
		n += accel;
		if n>0 {
			(*s.data)[i].velocity = n/(n+1);
		} else {
			(*s.data)[i].velocity = n/(1-n);
		}
	
		//Velocity modifies position

		//We want something like log effects on certainty
		//where velocity matters more at low certainty, less at high
		//we're modeling this on the function n/n+1
		o = (*s.data)[i].certainty;
		n = -o/(o-1);
		n += (*s.data)[i].velocity;
		if n>0 {
			(*s.data)[i].certainty = n/(n+1);
		} else {
			//Certainty bottoms out at zero rather than going negative.
			(*s.data)[i].certainty = 0;
		}
		
		//If we're below threshold, we can possibly switch the point
		//to another cluster
		if (*s.data)[i].certainty < MaxInitialCertainty &&
			(*s.data)[i].velocity < 0 &&
			accel < 0 &&
			rand.Float() < PullRate {
			//Adopt the puller's cluster and restart with a positive
			//velocity at least as large as the old speed.
			(*s.data)[i].cluster = pullC;
			vel := -1 * (*s.data)[i].velocity;
			randvel := rand.Float() * MaxInitialVelocity;
			if randvel > vel {vel = randvel;}
			(*s.data)[i].velocity = vel;
	//		fmt.Print(",");
		} else if rand.Float() < MutationRate*MutationRate ||
			((*s.data)[i].certainty < MaxInitialCertainty &&
			(*s.data)[i].velocity < 0 &&
			rand.Float() < MutationRate ) {
			//Mutation: jump to a uniformly random cluster, with the same
			//positive-velocity restart as above.
			(*s.data)[i].cluster = rand.Intn(numClasses);
			vel := -1 * (*s.data)[i].velocity;
			randvel := rand.Float() * MaxInitialVelocity;
			if randvel > vel {vel = randvel;}
			(*s.data)[i].velocity = vel;
	//		fmt.Print("^");
		}
	}
	
	return;
}
//Kmeans performs one Lloyd-style iteration: reassign every point to
//its nearest cluster mean, then ensure no cluster ends up empty by
//stealing a point from the largest cluster.
func (s *ClusterSolution) Kmeans() {
	//Calculate Means
	means := s.ClusterMeans();
	
	//Recalculate Clusters
	i := 0;
	counts := make([]int,s.numberClusters);
	for element := range (*s.points).Iter() {
		point := element.(clusterpoint.ClusterPoint);
		mink := 0;
		//NOTE(review): minkv starts at 1.0, so a point at distance >= 1.0
		//from every mean silently lands in cluster 0 — presumably
		//point.Dist returns values in [0,1]; verify against clusterpoint.
		minkv := 1.0;
		for k := 0; k < len(*means); k++ {
			dist := point.Dist((*means)[k]);
			if dist < minkv {
				mink = k;
				minkv = dist;
			}
		}
		(*s.data)[i].cluster = mink;
		counts[mink]++;
		i++;
	}
	//Redistribute empty clusters
	//Each empty cluster takes one point from the largest cluster.
	maxk := 0;
	for i := 0; i < s.numberClusters; i++ {
		if counts[i] > counts[maxk] {
			maxk = i;
		}
	}
	//NOTE(review): counts is not updated while stealing, so several
	//empty clusters all drain the same donor maxk — confirm the donor
	//cannot itself be emptied for the sizes used here.
	for i := 0; i < s.numberClusters; i++ {
		if counts[i] == 0 {
			for j:= 0; j < len(*s.data); j++ {
				if (*s.data)[j].cluster == maxk {
					(*s.data)[j].cluster = i;
					break;
				}
			}
		}
	}
}

//Run drives iter rounds of the search loop: it submits its current
//solution to the coordinator via reqChan, receives a partner solution
//on its private response channel, moves toward/away from the partner
//with update(), refreshes Fitness, and re-submits. When finished it
//drains the final unused response and signals doneChan.
func (s *ClusterSolution) Run(reqChan chan *AccelerationRequest,
		iter int,
		doneChan chan int) {

	//Progress marker: one '!' per worker started.
	fmt.Print("!");

	var req AccelerationRequest;
	req.Mine = s;
	response := make(chan *ClusterSolution);
	req.Result = response;

	s.Fitness = s.fitness();
	reqChan <- &req;
	for i := 0; i < iter; i++ {
		result := <-response;

		//Note: distance called on result
		//in order to get the mapping to give a local cluster
		//corresponding to each remote cluster. (ie, the inverse
		//mapping ensures that this will be onto.)
		dist,imap := result.distance(s);
		

		s.update(dist,result,imap);
		s.Fitness = s.fitness();

		req.Mine = s;
		reqChan <- &req;
	}
	//Take out the extra response
	//(the last request submitted in the loop above is answered but unused).
	<-response;

	//Progress marker: one '.' per worker finished.
	fmt.Print(".");
	doneChan <- 1;
}
	

//Create a randomized initial solution
//MakeSolution builds a randomized initial solution over the given
//points: each point receives a uniformly random cluster in
//[0, classes), a small random certainty in [0, MaxInitialCertainty),
//and a velocity in [-MaxInitialVelocity, MaxInitialVelocity). The
//starting Fitness is the worst possible score, and classes is also
//recorded in the package-level numClasses used by update()'s mutation.
func MakeSolution(size, classes int, points *vector.Vector) *ClusterSolution {
	s := new(ClusterSolution);
	s.numberClusters = classes;
	dims := make([]ClusterDim, size);
	s.data = &dims;
	s.points = points;
	s.Fitness = abbr.StateAbbrLen;
	numClasses = classes;

	for i := 0; i < size; i++ {
		dims[i].cluster = rand.Intn(classes);
		dims[i].certainty = rand.Float() * MaxInitialCertainty;
		dims[i].velocity = rand.Float()*2*MaxInitialVelocity - MaxInitialVelocity;
	}

	return s;
}
