package gray;

import genome.*;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Set;
import java.util.Vector;

import coevolution.GGTGenerator;

import tool.*;


public class GrayTest {

	//Contains general-use static routines for building reflected Gray codes,
	//manipulating integer<-->bitstring encodings (shuffling, perturbing,
	//degrading) and computing genotypic/phenotypic locality statistics.

	public static Hashtable<String,String> greyMap = new Hashtable<String,String>(); //Maps integer (as decimal String) to Gray-code bitstring
	public static Hashtable<String,String> binMap = new Hashtable<String,String>(); //Maps Gray-code bitstring to standard binary bitstring
	public static Hashtable<String,String> b2Map = new Hashtable<String,String>(); //Maps standard binary bitstring to integer (as decimal String)
	public static int hashN = 0; //bit width of the currently populated encoding (0 = none built yet)
	public static double greyMapLocality = 1.0; //phenotypic locality of the current greyMap, updated by the search routines
	public static double tLocLevel = 1.0; //used to allow a generator to set the target locality level to degrade to

	/**
	 * Discards all three encoding tables and resets the recorded bit width so
	 * that populateGreyHash(n) will rebuild the code from scratch.
	 */
	public static void clearEncodings(){
		greyMap = new Hashtable<String,String>();
		binMap = new Hashtable<String,String>();
		b2Map = new Hashtable<String,String>();
		hashN = 0;
	}

	/**
	 * Degrades a mapping towards the desired phenotypic locality by repeated
	 * random perturbation, keeping only perturbations that move the measured
	 * locality closer to the target (a greedy search). The mapping required is
	 * integer --> bitstring; if the supplied map is the opposite orientation
	 * (bitstring --> integer), pass inverse = true.
	 * Side effect: greyMapLocality is updated with the last accepted locality.
	 *
	 * @param h              the mapping to degrade (the caller's map is not modified)
	 * @param targetLocality the phenotypic locality to aim for
	 * @param inverse        true if h maps bitstring --> integer
	 * @return a mapping in the same orientation as h whose locality is within
	 *         tolerance of the target, or the best found after 25000 attempts
	 */
	public static Hashtable<String,String> degradeEncoding(Hashtable<String,String> h, double targetLocality, boolean inverse){

		if(inverse){
			h = Utility.getInverseHash(h); //work internally on integer --> bitstring
		}

		double tol = 0.01; //acceptable distance from the target locality

		double d = Math.abs(targetLocality - normalisedLocalityP(h)); //difference between desired locality and target
		for(int i=0;i<25000 && !(d<=tol);i++){
			System.out.println(i+" "+d); //progress trace
			Hashtable<String,String> h2 = perturbMap(h,10);
			double l = normalisedLocalityP(h2);
			double d2 = Math.abs(targetLocality - l);
			if(d2<d){ //accept only perturbations that move us closer to the target
				h = h2;
				d = d2;
				greyMapLocality = l;
			}
		}

		if(inverse){
			h = Utility.getInverseHash(h); //restore the caller's orientation
		}
		return h;
	}

	/**
	 * Blind random search over shufflings of greyMap, keeping any shuffle whose
	 * normalised phenotypic locality beats the best seen so far. The final
	 * locality is recorded in greyMapLocality.
	 * NOTE(review): the acceptance test is l2 > l, i.e. this climbs towards
	 * HIGHER locality values -- confirm that matches the intended
	 * "delocalise" (poor neighbourhood) semantics.
	 */
	public static void delocaliseGreyCode(){
		double best = normalisedLocalityP(greyMap);
		for(int i=0;i<1000;i++){
			Hashtable<String,String> candidate = randomiseKeys(greyMap);
			double l2 = normalisedLocalityP(candidate);
			if(l2>best){
				greyMap = candidate;
				best = l2;
			}
		}
		greyMapLocality = best;
	}

	/**
	 * Returns all Hamming-distance-1 neighbours of a bitstring: one neighbour
	 * per position, obtained by flipping that single bit.
	 *
	 * @param s a bitstring of '0'/'1' characters
	 * @return an array of s.length() bitstrings, each differing from s in exactly one bit
	 */
	public static String[] getNeighbours(String s){
		String[] neighbours = new String[s.length()];
		int[] bits = Utility.strToIntArray(s);
		for(int i=0;i<bits.length;i++){
			int[] flipped = new int[bits.length];
			System.arraycopy(bits, 0, flipped, 0, bits.length);
			flipped[i] = 1 - flipped[i]; //flip bit i
			neighbours[i] = Utility.intArrayToString(flipped);
		}
		return neighbours;
	}

	/**
	 * Appends an order-n reflected Gray code to the end of the prefix string,
	 * storing each completed codeword in greyMap keyed by its decimal rank.
	 * Mutually recursive with yarg, which emits the reflected (reverse-order)
	 * half. Adapted from the Princeton recursion example:
	 * http://introcs.cs.princeton.edu/23recursion/GrayCode.java.html
	 */
	public static void gray(String prefix, int n) {
		if (n == 0) greyMap.put(""+greyMap.size(), prefix);
		else {
			gray(prefix + "0", n - 1);
			yarg(prefix + "1", n - 1);
		}
	}

	/**
	 * Estimates PHENOTYPIC locality for an integer --> bitstring map using the
	 * same normalised definition as normalisedLocalityP, but summing over a
	 * random sample of phenotypes (with replacement) rather than the whole space.
	 * NOTE(review): here dG = ham-1 with dGMin = 0, whereas normalisedLocalityP
	 * uses dG = ham with dGMin = 1; the two normalisations are not identical --
	 * confirm which is intended.
	 *
	 * @param h        integer (as String) --> bitstring map
	 * @param nSamples number of phenotypes to sample
	 * @return the sampled locality estimate
	 */
	public static double lPSampled(Hashtable<String,String> h, int nSamples){

		double l=0;

		double dGMax = hashN; //maximum difference in genotype space
		double dGMin = 0; //minimum difference in genotype space

		int N = 0; //normalisation constant, accumulated as summation terms are computed

		int sz = h.size();
		for(int i=0;i<nSamples;i++){
			int x = GGTGenerator.R.nextInt(sz); //random phenotype
			int[] s0 = Utility.strToIntArray(""+h.get(""+x));

			int x1 = x-1; //left phenotypic neighbour, if any
			if(x1>=0){
				int[] s1 = Utility.strToIntArray(h.get(""+x1));
				int dG = (Utility.ham(s0,s1)-1);
				l = l+((dGMax - dG)/(dGMax-dGMin));
				N = N+1;
			}

			int x2 = x+1; //right phenotypic neighbour, if any
			if(x2<sz){
				int[] s2 = Utility.strToIntArray(h.get(""+x2));
				int dG = (Utility.ham(s0,s2)-1);
				l = l+((dGMax - dG)/(dGMax-dGMin));
				N = N+1;
			}
		}
		return l/N;
	}

	/**
	 * Calculates GENOTYPIC locality for an integer --> binary mapping using the
	 * normalised definition:
	 * Lg = 1/N * sum_over_genotypes (sum_over_adjacent_genotypes ((max dP - dP)/(max dP - min dP)))
	 * where dP is the integer (phenotypic) distance between bit-flip neighbours.
	 * Each unordered neighbour pair contributes twice (once from each side),
	 * which cancels in the average.
	 */
	public static double normalisedLocalityG(Hashtable<String,String> h){

		Hashtable<String,String> h2 = Utility.getInverseHash(h); //binary --> integer view

		double l = 0;
		int count = 0; //normalisation constant N: total number of terms summed

		double dPMax = Math.pow(2, hashN)-1; //largest possible integer distance
		double dPMin = 1; //smallest possible distance between distinct integers

		Set<String> bin = h2.keySet();

		for(String s1: bin){
			int p1 = Integer.parseInt(h2.get(s1));
			String[] n = getNeighbours(s1);
			double partial_sum = 0.0;
			for(int i=0;i<n.length;i++){
				int p2 = Integer.parseInt(h2.get(n[i]));
				int dP = Math.abs(p2 - p1);
				partial_sum = partial_sum + ((dPMax - dP)/(dPMax - dPMin));
				count = count+1;
			}
			l = l+partial_sum;
		}
		return l/count;
	}

	/**
	 * Calculates PHENOTYPIC locality for an integer --> binary mapping using
	 * the normalised definition:
	 * Lp = 1/N * sum_over_phenotypes (sum_over_adjacent_phenotypes ((max dG - dG)/(max dG - min dG)))
	 * where dG is the Hamming distance between the genotypes of adjacent
	 * integers. The two end phenotypes have only one neighbour; their single
	 * term is doubled (a stated temporary hack) so the normalisation constant
	 * can stay at N = 2*size.
	 */
	public static double normalisedLocalityP(Hashtable<String,String> h){

		double l=0;

		double dGMax = hashN; //maximum difference in genotype space
		double dGMin = 1; //minimum difference in genotype space

		for(int i=0;i<h.size();i++){

			double partial_sum = 0.0;
			boolean endPoint = false;

			int[] s0 = Utility.strToIntArray(h.get(""+i));

			int x1 = i-1; //left phenotypic neighbour
			if(x1>=0){
				int[] s1 = Utility.strToIntArray(h.get(""+x1));
				int dG = Utility.ham(s0,s1);
				partial_sum = partial_sum+((dGMax - dG)/(dGMax-dGMin));
			}
			else {endPoint = true;}

			int x2 = i+1; //right phenotypic neighbour
			if(x2<h.size()){
				int[] s2 = Utility.strToIntArray(h.get(""+x2));
				int dG = Utility.ham(s0,s2);
				partial_sum = partial_sum+((dGMax - dG)/(dGMax-dGMin));
			}
			else {endPoint=true;}

			if(endPoint){partial_sum = partial_sum *2.0;} //temporary hack to correct for the ends

			l = l+partial_sum;
		}
		int N = 2*(h.size()); //normalisation constant: two terms per phenotype (end elements doubled above)
		return l/N;
	}

	/**
	 * Calculates (unnormalised) GENOTYPIC locality for a binary --> integer
	 * mapping: the sum of phenotypic distances over all bit-flip neighbour
	 * pairs. Each unordered pair is visited twice, hence the final division.
	 */
	public static double localityG(Hashtable<String,String> h){
		double l = 0;
		for(String s1: h.keySet()){
			int p1 = Integer.parseInt(h.get(s1));
			double partial_sum = 0.0;
			for(String s2 : getNeighbours(s1)){
				int p2 = Integer.parseInt(h.get(s2));
				partial_sum = partial_sum + Math.abs(p2-p1);
			}
			l = l+partial_sum;
		}
		return l/2; //factor 2 corrects for each pair being counted from both sides
	}

	/**
	 * Calculates (unnormalised) PHENOTYPIC locality for an integer --> binary
	 * mapping: the sum of (Hamming distance - 1) over all ordered pairs of
	 * adjacent integers, so each unordered pair contributes twice.
	 * Bug fix: the left-neighbour guard was x1&gt;0, which silently dropped the
	 * i=1 --> i=0 term (the (0,1) pair was counted once while every other pair
	 * was counted twice); it is now x1&gt;=0, consistent with
	 * normalisedLocalityP and lPSampled.
	 */
	public static int localityP(Hashtable<String,String> h){

		int l=0;

		for(int i=0;i<h.size();i++){
			int[] s0 = Utility.strToIntArray(h.get(""+i));

			int x1 = i-1;
			if(x1>=0){ //was x1>0: off-by-one that skipped the (1,0) contribution
				int[] s1 = Utility.strToIntArray(h.get(""+x1));
				l = l + (Utility.ham(s0,s1)-1);
			}

			int x2 = i+1;
			if(x2<h.size()){
				int[] s2 = Utility.strToIntArray(h.get(""+x2));
				l = l + (Utility.ham(s0,s2)-1);
			}
		}
		return l;
	}

	/**
	 * Builds Gray codes of width 3..9 and writes each code's genotypic and
	 * phenotypic all-pairs distance matrices to ./dMaps/.
	 */
	public static void main(String args[]){
		for(int i=3;i<10;i++){
			populateGreyHash(i);
			Vector<double[][]> d = getDistanceMatrices(Utility.getInverseHash(b2Map));
			Utility.writeMatToFile("./dMaps/gB"+i+".map",d.get(0));
			Utility.writeMatToFile("./dMaps/pB"+i+".map",d.get(1));
		}
	}

	/**
	 * Returns a perturbed copy of the map in which n randomly chosen pairs of
	 * values have been swapped (the two keys of a pair may coincide, which is
	 * a no-op). The caller's map is not modified.
	 *
	 * @param h map to perturb
	 * @param n number of random value swaps to apply
	 * @return the perturbed copy
	 */
	public static Hashtable<String,String> perturbMap(Hashtable<String,String> h, int n){

		Hashtable<String,String> h2 = Utility.copyHash(h); //copy so the caller's map is untouched

		for(int i=0;i<n;i++){

			Object[] keys = h2.keySet().toArray();

			int x1 = GGTGenerator.R.nextInt(keys.length);
			int x2 = GGTGenerator.R.nextInt(keys.length);

			String k1 = (String)keys[x1];
			String k2 = (String)keys[x2];
			String v1 = h2.get(k1);
			String v2 = h2.get(k2);

			//put overwrites existing entries, so no prior remove is needed
			h2.put(k1, v2);
			h2.put(k2, v1);
		}
		return h2;
	}

	/**
	 * (Re)builds the three encoding tables for an n-bit code. The Gray code
	 * itself is only regenerated when the requested width differs from the
	 * currently recorded one; binMap and b2Map are always (re)filled from
	 * greyMap.
	 *
	 * @param n bit width of the code
	 */
	public static void populateGreyHash(int n){
		if(hashN!=n){
			clearEncodings();
			gray("",n); //fills greyMap with the 2^n Gray codewords in rank order
			hashN = n;
		}

		//Temporarily modified to generate a worst case code by phenotypic locality
		//randomiseEncoding();
		//greyMap = degradeEncoding(greyMap,tLocLevel,false);

		for (int i = 0; i<greyMap.size(); i++) {
			String grey = greyMap.get(""+i);
			String bin = Utility.IntToBinStr(i,hashN);
			binMap.put(grey, bin);
			b2Map.put(bin, ""+i);
		}
	}

	/**
	 * Replaces greyMap with a copy whose values have been randomly shuffled
	 * among its keys, scrambling the integer --> codeword assignment.
	 */
	public static void randomiseEncoding(){
		greyMap = randomiseKeys(greyMap);
	}

	/**
	 * Returns a new table with the same keys as h but with the values randomly
	 * shuffled among them. The input map is not modified.
	 */
	public static Hashtable<String,String> randomiseKeys(Hashtable<String,String> h){

		Hashtable<String,String> h2 = new Hashtable<String,String>();

		ArrayList<String> shuffled = new ArrayList<String>(h.values());
		Collections.shuffle(shuffled);

		Iterator<String> it = shuffled.iterator();
		for(String key : h.keySet()){
			h2.put(key, it.next()); //pair each key with the next shuffled value
		}

		return h2;
	}

	/**
	 * Reverse-order half of the reflected Gray code recursion; see gray(..).
	 * Adapted from the Princeton recursion example:
	 * http://introcs.cs.princeton.edu/23recursion/GrayCode.java.html
	 */
	public static void yarg(String prefix, int n) {
		if (n == 0) greyMap.put(""+greyMap.size(), prefix);
		else {
			gray(prefix + "1", n - 1);
			yarg(prefix + "0", n - 1);
		}
	}

	/**
	 * Builds the all-pairs genotypic (Hamming) and phenotypic (integer)
	 * distance matrices for a phenotype --> genotype map. Row/column order
	 * follows the (unordered) key-set iteration order of h.
	 *
	 * @param h integer (as String) --> bitstring map
	 * @return a Vector holding {genotype distances, phenotype distances}
	 */
	public static Vector<double[][]> getDistanceMatrices(Hashtable<String,String> h){

		int sz = h.size();
		double[][] GD = new double[sz][sz];
		double[][] PD = new double[sz][sz];
		ArrayList<String> phenotypes = new ArrayList<String>(h.keySet());

		for(int i=0;i<phenotypes.size();i++){
			int[] g1 = Utility.strToIntArray(h.get(phenotypes.get(i)));
			int x1 = Integer.parseInt(phenotypes.get(i));
			for(int j=0;j<phenotypes.size();j++){
				int[] g2 = Utility.strToIntArray(h.get(phenotypes.get(j)));
				int x2 = Integer.parseInt(phenotypes.get(j));
				GD[i][j] = Utility.ham(g1, g2);
				PD[i][j] = Math.abs(x2 - x1);
			}
		}

		Vector<double[][]> d = new Vector<double[][]>();
		d.add(GD);
		d.add(PD);
		return d;
	}
}
