""" Classes and methods for the coalescent given a complex demographic history. 
See Volz, E.M., Complex population dynamics and the coalescent under neutrality. Genetics 2012. 

Three basic analyses are supported: 
1. Calculation of the likelihood of a tree
2. Simulation of trees 
3. Calculation of the probability that a particular taxon in a tree begat another taxon in the tree. 

The basic inputs of an analysis are
1. nwkstring -- A tree or trees in Newick format (not required if simulating trees)
2. sampleTimes -- the time of sampling for each taxon
3. sampleStates -- the state of each taxon
4. F,G,Y,t -- births, migrations, prevalence and time axis of the complex model (see paper referenced above for details). 

TODO 
running time should be linear in sample size, but is actually super-linear

"""


from pylab import *
import likhelper2
import  pdb, csv, time, sys, numpy, datetime, bisect, weakref
from copy import deepcopy
from Bio import Phylo
from cStringIO import StringIO
from scipy.integrate import odeint
from scipy.interpolate import InterpolatedUnivariateSpline, interp1d

__version__ = '0.2.0'

class TreeIncompatibleWithModelException(Exception):
	"""Signals that a genealogy cannot be reconciled with the demographic model.
	
	NOTE(review): not raised anywhere in this chunk of the file; presumably
	raised by analysis code elsewhere -- confirm before removing."""
	pass
#
class NegativeBranchLengthException(Exception):
	"""Raised when a tree has (or a height adjustment would create) a zero or
	negative branch length and repair was not requested."""
	pass
#


class GeneGenealogy(Phylo.BaseTree.Tree):
	""" Extends Phylo.BaseTree.Tree. Incorporates heights and states of taxa. Heights are relative to most recent sample. 
	
	Node heights are measured backwards in time: height 0 is the most recent
	sample, and the root has the greatest height.
	"""
	noMRCA = False # True if there are multiple trees in nwkstring
	
	def __init__(self, nwkstring, sampleTimes, sampleStates, rooted = True, id=None, name=None, repair_negative_branch_lengths = False):
		"""
		
		nwkstring -- type string, newick format tree
		sampleTimes -- type dict, keyed by taxon names (str), values type float
		sampleStates -- type dict, keyed by taxon names (str), values numpy.array(float), length m(int)
		rooted -- accepted for interface compatibility; the tree is always built as rooted
		repair_negative_branch_lengths -- if True, push parent heights upwards instead of
			raising NegativeBranchLengthException when a child is above its parent
		"""
		# number of demes/states, inferred from the first state vector (Python 2: values() is a list)
		self.m = len(sampleStates.values()[0])
		
		self.sampleTimes = sampleTimes
		self.sampleStates = sampleStates
		self.nwkstring = nwkstring
		
		# splitting on ';' allows nwkstring to hold several trees (a forest with no MRCA)
		self.nwkstrings = nwkstrings = nwkstring.split(';')
		biopython_phylos= list()
		
		for nwk in nwkstrings:
			try:
				nwk = nwk.strip('\n ') + ';'
				# skip empty fragments and fragments without branch lengths
				if len(nwk)==1 or nwk.count(':')==0:
					continue
				biopython_phylos.append( Phylo.read(StringIO(nwk), "newick") )
			except:
				# unparseable fragments are silently ignored; at least one tree must parse (checked below)
				pass
		#
		if len(biopython_phylos)==0:
			raise Exception('Invalid newick tree')
		elif len(biopython_phylos)>1:
			self.noMRCA = True
		#
		
		# merge the first-generation clades of all parsed trees under one artificial root
		clades = list()
		for bp in biopython_phylos:
			if len(bp.clade.clades)==0:
				#~ a single taxon; make sure branch length is correct
				clades.append(bp.clade)
			else:
				clades.extend(bp.clade.clades)
		self.root = Phylo.BaseTree.Clade( clades = clades )
		
		Phylo.BaseTree.Tree.__init__(self, root=self.root, rooted=True, id=id, name=name)
		
		# add heights, sample times , and states
		self.max_sample_time = max_sample_time = max(sampleTimes.values())
		self.sampleHeights_dict = dict()
		for u in self.get_terminals():
			u.sample_time = sampleTimes[u.name] 
			u.state = sampleStates[u.name]
			# height = time elapsed before the most recent sample
			u.height = max_sample_time - u.sample_time
			self.sampleHeights_dict[u.name] = u.height
		#
		# smallest strictly positive branch length; used as tolerance and as repair offset below
		self.min_branch_length = min([abs(u.branch_length) for u in self.get_terminals() + self.get_nonterminals() if u!=self.root and u.branch_length>0.])
		if min([abs(u.branch_length) for u in self.get_terminals() + self.get_nonterminals() if u!=self.root]) <=0.:
			print 'Warning: Tree has zero or negative branch lengths'
		# propagate heights from each tip towards the root along the tip's path
		for u in self.get_terminals():
			root2u = self.get_path(target = u)
			lv = u
			for v in root2u[::-1][1:]:
				# stop as soon as an ancestor's height is already exactly consistent with this path
				if v.__dict__.has_key('height') and v.height == lv.height + lv.branch_length:
					break
				elif v.__dict__.has_key('height') and abs(v.height - (lv.height + lv.branch_length)) > 1e-4*self.min_branch_length:
					#~ print 'Warning: sample times incompatible with branch lengths, %s, %s, %1.4f. Adjusting branch length.' % (v.name, lv.name, v.height - (lv.height + lv.branch_length))
					# sample times win over stated branch lengths; adjust the branch
					lv.branch_length = v.height - lv.height
				if lv.branch_length<0:
					raise NegativeBranchLengthException()
				v.height = lv.height + lv.branch_length
				v.name = lv.name  + '.'
				lv = v
		#
		# NOTE(review): root2u is left over from the final iteration of the loop above;
		# relies on at least one terminal existing
		self.clade.height = root2u[0].height + root2u[0].branch_length
		
		# iteratively push parents above their children until no inversions remain
		treeHasNegativeBranchLengths = True
		while treeHasNegativeBranchLengths:
			treeHasNegativeBranchLengths = False
			for u in self.get_nonterminals():
				#~ try:
					#~ clades = u.clades
				#~ except:
					#~ pdb.set_trace()
				#~ h = max(v1.height, v2.height)
				h = max([c.height for c in u.clades])
				if h >= u.height:
					treeHasNegativeBranchLengths = True
					if not repair_negative_branch_lengths:
						raise NegativeBranchLengthException()
					#
					print 'Repairing negative branch length', u.name, h, u.height, [v.height for v in u.clades], self.min_branch_length
					u.height = h + self.min_branch_length
		#
		#~ repair branch lengths
		for u in self.get_nonterminals():
			for v in u.clades:
				v.branch_length = u.height - v.height
				if v.branch_length <=0:
					raise NegativeBranchLengthException()
			#~ v1,v2 = u.clades
			#~ v1.branch_length = u.height - v1.height
			#~ v2.branch_length = u.height - v2.height
			#~ if v1.branch_length <=0 or v2.branch_length<=0:
				#~ raise NegativeBranchLengthException()
		
		
		#~ self.sampleHeightsArray = array( [self.find_clades(name=kk).next().height for kk in sampleTimes.keys()] ) # find_clades function horribly slow
		# arrays below share the ordering of self.taxa
		self.taxa = [u.name for u in self.get_terminals()]
		self.sampleTimesArray = array(  [sampleTimes[kk] for kk in self.taxa ] )
		self.sampleStatesArray = array( [sampleStates[kk] for kk in self.taxa] )
		self.sampleHeightsArray = array( [self.sampleHeights_dict[kk] for kk in self.taxa] )
	#
	
	def get_parent(self, child_clade):
		""" Returns parent of child_clade(Phylo.BaseTree.Clade); falls back to the root if no path is found """
		try:
			return self.get_path(child_clade)[-2]
		except: #return root
			return self.root
	#
	
	def get_extant_branches(self, h):
		""" Returns list of Phylo.BaseTree.Clade which have incoming branches at height h(float) """
		return [u for u in self.get_terminals() + self.get_nonterminals() if u.height <= h and self.get_parent(u).height > h]
	#
	
	def rescale_branch_lengths(self, f):
		""" shrink or expand the height of the tree by factor f 
		
		f -- float, multiplicative factor applied to internal node heights;
		sample (tip) heights are left unchanged """
		#~ adjust internal node heights
		for u in  self.get_nonterminals(): 
			u.height = f * u.height
		#
		#~ repair inconsistant node heights; may exist because sample heights are not changed;
		nbl = True
		while nbl:
			nbl = False
			for u in self.get_nonterminals():
				for v in u.clades:
					if v.height > u.height:
						nbl = True
						u.height = v.height + v.branch_length *f
		#
		#~ repair branch lengths
		for u in self.get_terminals() + self.get_nonterminals():
			if u == self.root:
				continue
			#
			uu = self.get_parent(u)
			u.branch_length = uu.height - u.height
		#
	#
	
	def model_based_rescale_branch_lengths(self, daf, fractionOfTree = 0.50):
		""" model-based method of rescaling tree height and branch lengths;
		rescales height so that nlft matches model prediction;
		
		daf -- instance of DeterministicAncestorFunction 
		fractionOfTree -- Use this fraction of node heights (tip to root) when rescaling; don't use all node heights"""
		
		# invert the model's cumulative-coalescent curve: number of coalescents -> height
		daftaxis = linspace(0., max(daf.A_taxis), 1000)
		daf_ncbh2h = interp1d([daf.number_coalescents_below_height(h) for h in daftaxis], daftaxis)
		daf_h = daf_ncbh2h( daf.number_coalescents_below_height(daftaxis[-1]) * fractionOfTree)
		
		# match the model height to the empirical quantile of internal node heights
		internalHeights = sort( [u.height for u in self.get_nonterminals() ] ) #if u.height > max(self.sampleHeightsArray)
		f = daf_h / internalHeights[int(fractionOfTree*len(internalHeights))]
		
		#~ print f
		#~ plot(internalHeights)
		#~ plot(daf_ncbh2h(arange(0,max(daf_ncbh2h.x))))
		#~ plot(f * internalHeights)
		#~ pdb.set_trace()
		
		self.rescale_branch_lengths(f)
		
		
		'''#~ daftaxis = linspace(max(self.sampleHeightsArray), max(daf.A_taxis), 1000)
		
		dafnlft = array( [ sum(daf.get(h)) for h in daftaxis] )
		
		#~ dafnlft_daftaxis = interp1d(dafnlft[::-1], daftaxis, bounds_error = False, fill_value = min(dafnlft))
		#~ ncoal = int(max(dafnlft) - min(dafnlft))
		#~ _dafnlft = dafnlft[0] - arange( ncoal )
		#~ _dafnlft = _dafnlft[_dafnlft>=1]
		
		dafncoals = max(dafnlft) - dafnlft
		ncoal = max(dafncoals)
		dafncoal_dafheight = interp1d(dafncoals, daftaxis, bounds_error = False, fill_value = max(daftaxis))
		
		
		Tgg = sort( [u.height for u in self.get_nonterminals() ] ) #if u.height > max(self.sampleHeightsArray)
		if len(Tgg) > ncoal:
			Tgg = Tgg[:ncoal]
		
		T_A = dafncoal_dafheight( arange(len(Tgg) ) )
		
		#~ standardize times?
		T_A = T_A - min(T_A)
		Tgg = Tgg - min(Tgg)
		
		if fractionOfTree:
			nt = int( fractionOfTree * len(T_A) )
			T_A = T_A[:nt]
			Tgg = Tgg[:nt]
		
		#~ from scipy import polyfit
		#~ f, b = polyfit( Tgg, T_A, 1)
		#~ print f, b
		from scipy.optimize import fmin_bfgs
		objfun = lambda f: sum( (T_A - f*Tgg)**2. )
		R = fmin_bfgs(objfun, x0=1.)
		#~ print R
		f = R[0]
		print f
		#~ if f * self.root.height > max(daf.A_taxis):
			#~ f = (max(daf.A_taxis)*.98) / self.root.height #TODO need to choose appropriate scale
			#~ print f
		#~ #
		
		plot(arange(len(Tgg)), Tgg)
		plot(arange(len(T_A)), T_A)
		plot(arange(len(Tgg)), Tgg* f); show()
		pdb.set_trace()
		#~ 
		self.rescale_branch_lengths(f)
		'''
	#
	
	n_truncated = 0 # number of nodes removed by truncate_tree
	def truncate_tree(self, maxH):
		"""remove nodes beyond height maxH; truncated lineages become children of the root"""
		new1stGen = list()
		self.n_truncated = len( [u for u in  self.get_terminals() + self.get_nonterminals() if u.height > maxH] ) 
		if self.n_truncated <= 1:
			return None
		else: 
			self.n_truncated -=1 # b/c root is not truncated
		# nodes whose incoming branch crosses maxH become the new first generation
		for u in self.get_terminals() + self.get_nonterminals():
			if u == self.root:
				continue
			uu = self.get_parent(u)
			if u.height < maxH and uu.height > maxH:
				new1stGen.append(u)
		#
		#~ self.root = Phylo.BaseTree.Clade( clades = new1stGen )
		#~ Phylo.BaseTree.Tree.__init__(self, root=self.root, rooted=True)
		self.root.clades = new1stGen
		if self.root.height > maxH:
			self.root.height = maxH
		#
		for u in new1stGen:
			u.branch_length = self.root.height - u.height
#


class GeneGenealogyStates(GeneGenealogy):
	""" Extends GeneGenealogy. Also computes state at each node. Can be used for likelihood and transmission probabilities. Supports multiple trees (if there is no MRCA).
	
	Walks the tree from the tips (height 0) towards the root, integrating
	branch-state / lineage-count ODEs between successive sample/coalescent
	heights (via the likhelper2 C helper) and assigning a state and coalescent
	'rate' to each internal node.
	"""
	#~ situations which may give -inf likelihood
	nLineagesExceedsPopulationSize = False
	impossibleCoalescentEvent = False # if there is a node with incompatible daughter branches
	forgive_zero_S = True # if True, non-positive survival terms are replaced by the smallest positive one
	
	def __init__(self, nwkstring, sampleTimes, sampleStates, fgy, finiteSizeCorrection=False, rooted = True, id=None, name=None, repair_negative_branch_lengths=False, rescaleBranchLengths = None, modelBasedRescaleBranchLengths = None, truncateTree = None):
		""" See GeneGenealogy.__init__
		
		fgy -- Instance of FGY. Provides births, migrations, and population size over time. 
		
		Keyword arguments: 
		finiteSizeCorrection -- Account for finite size of population when calculating states. More accurate, but slower. 
		rescaleBranchLengths -- Rescales branch lengths by this factor before calculating states & likelihood
		modelBasedRescaleBranchLengths -- Rescales branch lengths to minimize RSS between observed node heights and predicted node heights; if this variable is type float (0-1), will only use this fraction of node heights (tip to root) when calculating the RSS 
		truncateTree -- if given (float), nodes above this height are removed before computing states """
		#~ super(GeneGenealogyStates, self).__init__(self, nwkstring, sampleTimes, sampleStates, rooted = rooted, id=id, name=name)
		GeneGenealogy.__init__(self, nwkstring, sampleTimes, sampleStates, rooted = rooted, id=id, name=name, repair_negative_branch_lengths=repair_negative_branch_lengths)
		
		if rescaleBranchLengths:
			self.rescale_branch_lengths(rescaleBranchLengths)
		elif modelBasedRescaleBranchLengths:
			daf = DeterministicAncestorFunction(sampleTimes, sampleStates,fgy, tres = 470)
			#~ daf = Deterministic_Ancestor_StateTransition_Function(sampleTimes, sampleStates,fgy)
			if type(modelBasedRescaleBranchLengths)==bool:
				self.model_based_rescale_branch_lengths(daf)
			else: 
				self.model_based_rescale_branch_lengths(daf, fractionOfTree = modelBasedRescaleBranchLengths)
		
		if truncateTree:
			self.truncate_tree(truncateTree)
		#
		
		self.fgy = fgy
		
		# offset between the model's forward time axis and tree height 0
		self.mostRecentSampleHeight = max(fgy.t) - self.max_sample_time
		
		# diagnostics accumulated by _update_branches: total NLFT, branch-state NLFT, and times
		self.A1s = list()
		self.bsA1s = list()
		self.t_A1s = list()
		
		# event heights, processed tip-to-root; buckets of samples/coalescents per height
		coalescentOrSampleHeights = self.coalescentOrSampleHeights = sort(unique([n.height for n in self.get_terminals() + self.get_nonterminals()])) 
		sampledAtHeight = dict(zip(coalescentOrSampleHeights, [list() for i in range(len(coalescentOrSampleHeights))] ))
		coalescentAtHeight = dict(zip(coalescentOrSampleHeights, [list() for i in range(len(coalescentOrSampleHeights))] ))
		for n in self.get_terminals():
			sampledAtHeight[n.height].append(n)
		#
		for u in self.get_nonterminals():
			if u==self.root and self.noMRCA:
				continue
			#
			# only binary nodes are treated as coalescent events
			if len(u.clades) != 2:
				continue
			coalescentAtHeight[u.height].append(u)
		#
		nodes2update = list() #corresponding to extant lineages that need updating
		self.S = 1. # survival probability accumulated over the current inter-event interval
		self.Ss = list()
		h0 = 0.
		self.extant = list() # extant branches in each interval
		for h in coalescentOrSampleHeights:
			# events beyond the model time axis are ignored
			if h >= self.fgy.mostRecentTime:
				break
			
			#~ update states for all extant lineages
			self._update_branches(nodes2update, h0, h)
			h0 = h
			
			#~ add new samples
			if len(sampledAtHeight[h]) > 0:
				for n in sampledAtHeight[h]:
					n.branchState = n.state
				nodes2update += sampledAtHeight[h]
			#
			
			#~ coalescents
			if len(coalescentAtHeight[h]) > 0:
				# record the interval's survival term and reset it for the next interval
				self.Ss.append(max(self.S, 0))
				self.S = 1.
				for u in coalescentAtHeight[h]:
					nodes2update.remove(u.clades[0])
					nodes2update.remove(u.clades[1])
					self._new_state(u) #state of new node
					if finiteSizeCorrection:
						#~ more accurate, but slow
						self._update_nonCoalescent_states(nodes2update, u) #finite size corrections for other branches
					nodes2update.append(u)
			#
			# weakrefs so the extant-lineage index does not keep clades alive
			self.extant.append([weakref.ref(u) for u in nodes2update])
		#
		#~ adjust for numerical errors
		self.Ss = array(self.Ss); self.Ss[self.Ss>=1.] = mean(self.Ss[self.Ss<1.])
		if self.forgive_zero_S:
			nonzeroS = self.Ss[self.Ss> 0]
			self.Ss[self.Ss<=0] = min(nonzeroS)
		
		self.A1s = array(self.A1s)
		self.bsA1s = array(self.bsA1s)
	#
	
	def _update_nonCoalescent_states(self, nodes2update, u):
		"""Apply finite-population-size corrections to the branch states of lineages
		that did NOT take part in the coalescent at node u."""
		#~ TODO check running time in profiler; can probably be faster
		Y0 = self.Y0 # should be set in self._new_state
		for v in nodes2update:
			p_i = v.branchState
			fterm = u.branchState * p_i * (Y0-1)/(Y0-p_i)
			stermmat = repeat(u.branchState * Y0 / (Y0-p_i), self.m).reshape((self.m, self.m)).T
			stermmat[ diag_indices_from(stermmat) ] = 0.
			sterm = p_i * sum(stermmat, axis=1)
			v.branchState = fterm + sterm
	#
	
	def _new_state(self, u):
		"""Compute the state and coalescent rate of internal node u from the branch
		states of its two daughters and the model rates F,Y at u's height."""
		if u == self.root and self.noMRCA:
			#~ root is not MRCA
			u.rate = 1.
			return
		statei, statej = (u.clades[0].branchState, u.clades[1].branchState )
		F,G,Y = self.fgy.get(u.height, mostRecentSampleHeight = self.mostRecentSampleHeight)
		self.Y0 = Y # cached for _update_nonCoalescent_states
		nstate = zeros(self.fgy.m)
		u.rate = 0.
		for k,l in self.fgy.nonZeroBirths:
			#~ march 1 '13, added finite size corrections
			if k==l:
				rate = F[k,l] * (statei[k] * statej[k] * (Y[k]-1)/(Y[k]-statej[k]) + statei[k] * statej[k] * (Y[k]-1)/(Y[k]-statei[k])) / Y[k] / (Y[k]-1)
			else:
				rate = F[k,l] * (statei[k] * statej[l] * Y[k] / (Y[k] - statej[k]) + statei[l] * statej[k]* Y[l] / (Y[l] - statej[l])) / Y[k] / Y[l]
			nstate[k] += rate
			u.rate += rate
		#
		assert sum(isnan(nstate)) == 0
		
		
		if sum(nstate)==0:
			# daughters cannot coalesce under the model; fall back to averaging
			self.impossibleCoalescentEvent = True
			u.state = (statei + statej) / 2.
			u.branchState = (statei + statej) / 2.
		else:
			u.branchState =  nstate / sum(nstate)
			u.state = copy(u.branchState)
		#
	
	def _update_branches(self, nodes2update, h0, h):
		"""Integrate the joint (A, Q, S) ODE from height h0 to h and apply the
		resulting transition matrix Q to every extant branch state.
		
		A -- length-m NLFT vector; Q -- mXm state-transition matrix; S -- survival term."""
		if h0==h or len(nodes2update)==0:
			return None
		# make initial conditions
		A0 = zeros( self.fgy.m )
		for u in nodes2update:
			A0 += u.branchState
		
		Q0 = eye(self.fgy.m)
		S0 = self.S
		AQS0 = concatenate((A0, ravel(Q0), [S0] ))
		
		# make solution intervals; each interval less than deltat
		t = linspace(h0, h, 2 + floor((h-h0)/self.fgy.min_deltat))
		
		for hh0,hh1 in zip(t[:-1], t[1:]):
			F0,G0,Y0 = self.fgy.get(hh0, mostRecentSampleHeight = self.mostRecentSampleHeight)
			F1,G1,Y1 = self.fgy.get(hh1, mostRecentSampleHeight = self.mostRecentSampleHeight)
			#~ renormalize A
			AQS0[:self.fgy.m] = minimum( AQS0[:self.fgy.m] , Y0)
			try:
				AQS1 = likhelper2.solveAQS(hh0, hh1-hh0, self.fgy.minPopSize,  self.fgy.m, len(self.fgy.nonZeroElements), AQS0, F0, F1, G0, G1, Y0, Y1, array(self.fgy.nonZeroElements, dtype=float))
				# march '13 renormalize variables to avoid numerical problems;
				Q = AQS1[self.fgy.m:(self.fgy.m+self.fgy.m**2)].reshape((self.fgy.m, self.fgy.m))
				Q = abs(Q)
				rowsumQ = sum(Q, axis = 1)
				Q = Q / rowsumQ.reshape((len(Q),1))
				AQS1[self.fgy.m:(self.fgy.m+self.fgy.m**2)] = ravel(Q)
				S = max( AQS1[-1], 0); S = min(S, 1); AQS1[-1] = S
			except: 
				#~ numerical problems;  heuristically reduce S:
				AQS1 = AQS0
				A = sum( AQS0[:self.fgy.m] )
				Y = (sum(Y0) + sum(Y1)) / 2.
				F = (sum(F0) + sum(F1)) / 2.
				AQS1[-1] = AQS0[-1] * exp(-(hh1 - hh0) * F * (A/Y)**2.)
			if sum( AQS1[:self.fgy.m] ) > sum( Y1 ):
				self.nLineagesExceedsPopulationSize = True
			#
			AQS0 = AQS1
		#
		
		A1 = AQS1[:self.fgy.m]
		Q = AQS1[self.fgy.m:(self.fgy.m+self.fgy.m**2)].reshape((self.fgy.m, self.fgy.m))
		self.S = AQS1[-1]
		
		# renormalization 
		Q = abs(Q)
		rowsumQ = sum(Q, axis = 1)
		Q = Q / rowsumQ.reshape((len(Q),1))
		
		#update state of branches
		bsA1 = zeros(self.fgy.m) 
		for u in nodes2update:
			u.branchState = abs( dot( Q.T , u.branchState) )
			assert sum(u.branchState)>0
			u.branchState = u.branchState / sum(u.branchState)
			
			bsA1 += u.branchState #
		#
		
		self.A1s.append(A1)
		self.bsA1s.append(bsA1)
		self.t_A1s.append(t[-1])
	#
	
	
	def get_extant_branches(self, h):
		""" Returns list of Phylo.BaseTree.Clade which have incoming branches at height h(float) """
		return [r() for r in  self.extant[ bisect.bisect_right(self.coalescentOrSampleHeights, h) -1 ] ]
#



class AncestorFunction(object): 
	"""Provides methods to get number of lineages (state-dependent) as a function of time. """
	def __init__(self, ggs, tres = 1000):
		"""
		
		ggs -- Instance of GeneGenealogyStates 
		tres -- int, resolution used to derive self.deltat (step size over the recorded time span)
		"""
		# one linear interpolator per deme over the recorded branch-state NLFT;
		# heights outside the recorded range default to an NLFT of 1
		self.interpolators = [interp1d(ggs.t_A1s, AA, fill_value = 1, bounds_error = False) for AA in array(ggs.bsA1s).T]
		# (removed an unused sorted copy of ggs.t_A1s that was computed here)
		self.deltat = (max(ggs.t_A1s) - min(ggs.t_A1s)) / tres
	#
	
	def get(self, t):
		"""Returns NLFT (numpy.array(float)) for each of m(int) states at height t(float). """
		return array( [i(t) for i in self.interpolators] )
#


class DeterministicAncestorFunction(AncestorFunction):
	"""Extends AncestorFunction. Uses a fast ODE approximation to the NLFT.
	
	running time is sensitive to tres"""
	def __init__(self, sampleTimes, sampleStates, fgy, tres = 1000 ):
		"""
		sampleTimes -- dict, taxon name (str) -> sample time (float)
		sampleStates -- dict, taxon name (str) -> length-m numpy.array(float)
		fgy -- instance of FGY
		tres -- int, number of points on the integration time axis
		"""
		self.fgy = fgy
		k = sampleTimes.keys()
		st_list = [sampleTimes[kk] for kk in k]
		self.sampleTimesArray = array(st_list)
		ss_list = [sampleStates[kk] for kk in k]
		mst = max(st_list)
		# heights are measured backwards from the most recent sample
		sh_list = [mst - st for st in st_list]
		self.sampleHeightsArray = array(sh_list)
		uSampleHeights = sort( unique( sh_list) )
		self.mostRecentSampleHeight = max(fgy.t) - mst
		# aggregate sample state vectors per unique sample height
		newSampleStates = dict.fromkeys(uSampleHeights)
		m = len( ss_list[0] )
		#~ for st, sh, ss in zip(gg.sampleTimesArray, gg.sampleHeightsArray, gg.sampleStatesArray):
		for st, sh, ss in zip(st_list, sh_list, ss_list):
			try:
				newSampleStates[sh] += array(ss)
			except:
				# first state at this height (fromkeys initialized it to None)
				newSampleStates[sh] = zeros(m)
				newSampleStates[sh] += array(ss)
		#
		
		A0 = zeros(m)
		if newSampleStates.has_key(0.):
			A0+=newSampleStates[0.]
		Alast = A0
		xA = self.A = [Alast]
		
		
		#~ "ddiiOOOOOOOO", &t0, &deltat, &m, &lengthNonZeroElements, &A0, &b0, &b1, &mig0, &mig1, &p0, &p1, &nonZeroElements
		# time axis: regular grid plus every sample height, so samples land exactly on grid points
		A_taxis = self.A_taxis = sort( unique(concatenate((uSampleHeights , linspace(0., fgy.mostRecentTime, tres))) ) )
		self.deltat = fgy.mostRecentTime / tres
		
		for h0, h1 in zip(A_taxis[:-1], A_taxis[1:]): # don't forget the last interval
			A0=Alast
			F0,G0,Y0 = fgy.get(h0, mostRecentSampleHeight = self.mostRecentSampleHeight)
			F1,G1,Y1 = fgy.get(h1, mostRecentSampleHeight = self.mostRecentSampleHeight)
			# C helper integrates the NLFT ODE over [h0, h1]
			Alast = likhelper2.solveA(h0, h1-h0, fgy.minPopSize, m, len(fgy.nonZeroElements), A0, F0, F1, G0, G1, Y0, Y1, array(fgy.nonZeroElements, dtype=float))
			if newSampleStates.has_key(h1):
				# lineages are added when their sample height is reached
				Alast+=newSampleStates[h1]
			#
			xA.append(Alast)
		#
		
		#~ pdb.set_trace()
		
		# make interpolators
		self.interpolators = [interp1d(A_taxis, AA, fill_value = 1, bounds_error = False )  for AA in array(xA).T ]
		
		# cumulative coalescents = samples below h minus lineages at h; sorted to keep the curve monotone for interpolation
		self.number_coalescents_below_height_interp = interp1d( A_taxis,   sort([abs( sum( self.sampleHeightsArray <= h ) - sum( self.get(h) )) for h in A_taxis ]) )
	#
	
	def number_coalescents_below_height(self, h):
		# number of coalescent events below height h (float)
		
		#~ a = sum( self.get(h) )
		#~ nbelowh = sum( self.sampleHeightsArray <= h )
		#~ return nbelowh - a
		
		return self.number_coalescents_below_height_interp(h)
#

class Deterministic_Ancestor_StateTransition_Function(AncestorFunction):
	"""Extends AncestorFunction. Also uses fast ODE approximation to calculate number of coalescents and migration events. 
	
	The integrated state vector has layout [A (length m), M (m**2), C (m**2)]:
	NLFT per deme, cumulative migrations, cumulative coalescents."""
	def __init__(self, sampleTimes, sampleStates, fgy, tres = 1000 ):
		"""
		sampleTimes -- dict, taxon name (str) -> sample time (float)
		sampleStates -- dict, taxon name (str) -> length-m numpy.array(float)
		fgy -- instance of FGY
		tres -- int, number of points on the integration time axis
		"""
		k = sampleTimes.keys()
		st_list = [sampleTimes[kk] for kk in k]
		ss_list = [sampleStates[kk] for kk in k]
		mst = max(st_list)
		self.mostRecentSampleHeight = max(fgy.t) - mst
		sh_list = [mst - st for st in st_list]
		uSampleHeights = sort( unique( sh_list) )
		# aggregate sample state vectors per unique sample height
		newSampleStates = dict.fromkeys(uSampleHeights)
		m = self.m = len( ss_list[0] )
		#~ for st, sh, ss in zip(gg.sampleTimesArray, gg.sampleHeightsArray, gg.sampleStatesArray):
		for st, sh, ss in zip(st_list, sh_list, ss_list):
			try:
				newSampleStates[sh] += array(ss)
			except:
				# first state at this height (fromkeys initialized it to None)
				newSampleStates[sh] = zeros(m)
				newSampleStates[sh] += array(ss)
		#
		
		A0 = zeros(m)
		if newSampleStates.has_key(0.):
			A0+=newSampleStates[0.]
		# initial M and C counts are zero
		Alast = concatenate( (A0, zeros(m**2) , zeros(m**2)))
		xA = self.A = [Alast]
		
		#~ "ddiiOOOOOOOO", &t0, &deltat, &m, &lengthNonZeroElements, &A0, &b0, &b1, &mig0, &mig1, &p0, &p1, &nonZeroElements
		# time axis: regular grid plus every sample height
		A_taxis = self.A_taxis = sort( unique(concatenate((uSampleHeights , linspace(0., fgy.mostRecentTime, tres))) ) )
		self.deltat = fgy.mostRecentTime / tres
		
		#~ pdb.set_trace()
		
		for h0, h1 in zip(A_taxis[:-1], A_taxis[1:]): # don't forget the last interval
			A0=Alast
			F0,G0,Y0 = fgy.get(h0, mostRecentSampleHeight = self.mostRecentSampleHeight)
			F1,G1,Y1 = fgy.get(h1, mostRecentSampleHeight = self.mostRecentSampleHeight)
			try:
				Alast = likhelper2.solveAM(h0, h1-h0, fgy.minPopSize, m, len(fgy.nonZeroElements), A0, F0, F1, G0, G1, Y0, Y1, array(fgy.nonZeroElements, dtype=float))
			except:
				# NOTE(review): debugging leftover; drops into the debugger on solver failure
				pdb.set_trace()
			#~ print h0, sum(Alast[:m])
			# guard against numerical artifacts: NLFT must stay non-negative and non-increasing
			if sum(Alast[:m] < 0) > 0:
				Alast = A0
				#~ pdb.set_trace()
			#
			if sum(Alast[:m]) > sum(A0[:m]):
				Alast = A0 # should only happen due to sampling
			if newSampleStates.has_key(h1):
				Alast+=  concatenate(( newSampleStates[h1], zeros(2*m**2) ) )
			#
			xA.append(Alast)
		#
		
		# make interpolators: A occupies columns [0,m), M [m, m+m**2), C [m+m**2, end)
		self.interpolators = [interp1d(A_taxis, AA, fill_value = 1, bounds_error = False )  for AA in array(xA).T[:m] ]
		self.M_interpolators = [interp1d(A_taxis, AA, fill_value = 1, bounds_error = False )  for AA in array(xA).T[m:(m + m**2)] ]
		self.C_interpolators = [interp1d(A_taxis, AA, fill_value = 1, bounds_error = False )  for AA in array(xA).T[(m + m**2):] ]
	#
	
	def get_M(self, t):
		"""Returns mXm numpy.array describing cumulative number of migration events at height t. """
		return array( [i(t) for i in self.M_interpolators] ).reshape((self.m, self.m))
	#
	
	def get_Mkl(self, t, k, l):
		"""Returns cumulative number(float) of migration events at height t(float) from state k(int) to l(int). """
		return self.M_interpolators[k*self.m + l](t)
	#
	
	def get_C(self, t):
		"""Returns mXm numpy.array describing cumulative number(float) of coalescent events at height t(float). """
		return array( [i(t) for i in self.C_interpolators] ).reshape((self.m, self.m))
	#
	
	def get_Ckl(self, t, k, l):
		"""Returns cumulative number(float) of coalescent events at height t(float) from state k(int) to l(int). """
		return self.C_interpolators[k*self.m + l](t)
#


class FGY(object):
	"""Birth rates, migration rates, and population sizes as a function of height in the tree.
	
	F & G length T - 1 lists, correspond to intervals in time axis.
	
	"""
	
	def __init__(self, t, births, migrations, prevalence):
		"""
		
		t -- numpy.array(float), length T. Time axis. 
		births,migrations -- length T-1 list of mXm numpy.array(float). Birth & Migration events. 
		prevalence -- length T list of length m numpy.array(float). Population size at each time. 
		
		"""
		self.t = t
		self.births = births
		self.prevalence = prevalence
		self.m = len(prevalence[0])
		self.mostRecentTime = max(t)
		# mean grid spacing of the time axis; used as an integration step elsewhere
		self.min_deltat = mean(t[1:] - t[:-1]) #min(t[1:] - t[:-1])
		
		#~ determine which elements are nonzero
		# total births+migrations over all intervals; flags which (k,l) pairs ever occur
		totalEvents = sum([b + g for b, g in zip(births, migrations)], axis=0)
		self.nonZeroElements = []
		self.nonZeroBirths = []
		self.nonZeroMigrations = []
		nrows = len(totalEvents)
		for k in range(nrows):
			for l in range(nrows):
				if totalEvents[k, l] > 0 or totalEvents[l, k] > 0:
					self.nonZeroElements.append((k, l))
				if totalEvents[k, l] > 0:
					self.nonZeroBirths.append((k, l))
				if totalEvents[l, k] > 0:
					self.nonZeroMigrations.append((k, l))
		#
		
		#~ interpolators on forward time axis;
		# cumulative event counts, length T, starting from zero
		self.cum_births = [zeros((self.m, self.m))]
		self.cum_migrations = [zeros((self.m, self.m))]
		for b, g in zip(births, migrations):
			self.cum_births.append(self.cum_births[-1] + b)
			self.cum_migrations.append(self.cum_migrations[-1] + g)
		#
		# splines over the forward time axis; rates are recovered as spline derivatives in get()
		self.interp_cum_births = dict()
		self.interp_cum_migrations = dict()
		self.interp_prevalence = dict()
		for k in range(self.m):
			self.interp_prevalence[k] = InterpolatedUnivariateSpline(t, [p[k] for p in prevalence])
			for l in range(self.m):
				cb = [bb[k, l] for bb in self.cum_births]
				cg = [gg[k, l] for gg in self.cum_migrations]
				self.interp_cum_births[(k, l)] = InterpolatedUnivariateSpline(t, cb)
				self.interp_cum_migrations[(k, l)] = InterpolatedUnivariateSpline(t, cg)
		#
		
		#~ minimum population size for numerical stability of the ODEs
		p = ravel(prevalence)
		p[p == 0] = inf
		self.minPopSize = min(p)
	#
	def get(self, h, mostRecentSampleHeight = 0.):
		"""Return birth rates (mXm numpy.array), migration rates(mXm numpy.array), and population size(length m numpy.array) at height h(float)."""
		# convert tree height (time before most recent sample) to forward model time
		tt = self.mostRecentTime - (h + mostRecentSampleHeight)
		Y  = array([self.interp_prevalence[k](tt) for k in range(self.m)]).flatten()
		# floor population size for numerical stability
		Y[Y < self.minPopSize] = self.minPopSize
		dF = zeros( (self.m, self.m) )
		dG = zeros( (self.m, self.m) )
		for k, l in self.nonZeroElements:
			try:
				dF[k, l] = self.interp_cum_births[(k, l)].derivatives(tt)[1]
			except: # the derivative is not defined 
				dF[k, l] = 0.
			try:
				dG[k, l] = self.interp_cum_migrations[(k, l)].derivatives(tt)[1]
			except:
				dG[k, l] = 0.
		#
		return abs(dF), abs(dG), abs(Y)
	#
#




def log_likelihood(ggs):
	"""Returns the log likelihood (float) for a gene genealogy conditional on a complex demographic history (instance of GeneGenealogyStates). 
	
	ggs -- instance of GeneGenealogyStates; internal nodes carry a coalescent 'rate'
	(set by GeneGenealogyStates._new_state) and ggs.Ss holds the per-interval survival terms.
	Returns -inf if any coalescent event has a non-positive rate.
	"""
	ll = 0.
	nlterms = 0 # number of coalescent rate terms included in ll
	for u in ggs.get_nonterminals():
		# nodes without a computed rate (e.g. beyond the model time axis) are skipped
		# (was u.__dict__.has_key('rate'): deprecated and removed in Python 3)
		if 'rate' not in u.__dict__:
			continue
		if u.rate <= 0:
			return -inf
		ll += log(u.rate)
		nlterms += 1
	#
	if ggs.noMRCA: 
		# the root is artificial when there are multiple trees;
		# the root rate should not count in the likelihood
		ll -= log(ggs.root.rate)
		nlterms -= 1
	#
	ll += sum(log(ggs.Ss))
	if nlterms == 0:
		# no coalescent terms at all (e.g. fully truncated tree): nothing to extrapolate,
		# and dividing by nlterms would fail
		return ll
	meanTerm = ll / float(nlterms)
	
	# extrapolate the mean per-coalescent contribution over events lost to tree truncation
	ll += ((ggs.count_terminals() - 1) - nlterms) * meanTerm # in case the tree was truncated; ggs.n_truncated
	return ll
#


def transmission_probabilities(gg, tol=1e-6, detailed_output = False):
	"""Returns dictionary of transmission probabilities between taxa given instance of GeneGenealogyStates
	
	gg -- instance of GeneGenealogyStates; must provide fgy, mostRecentSampleHeight, heights and per-node states
	tol -- a tip's path towards the root is no longer integrated once its post-transmission identity probability drops below this (float)
	detailed_output -- if True, also return the list of cophenetic distances (Cs) and the flat list of probabilities (Ws)
	
	Returns W, a dict of dicts keyed by taxon name: W[u][v] is the probability that taxon u begat taxon v.
	
	DONE this is missing a step; needs to make discrete update to \rho given F at each node
	\rho_{ik}(s_{ij} + \epsilon) &= \frac{ \sum_l F_{kl} \rho_{ik} p_{jl} }{\sum_{k'}\sum_l F_{k'l}  \rho_{ik'}  p_{jl} } \\
&= \frac{\rho_{ik} F \rho_{j\cdot} }{ \rho_{i\cdot} F p_{j\cdot}}. 
	"""
	fgy = gg.fgy
	mostRecentSampleHeight = gg.mostRecentSampleHeight
	# 2d dicts keyed by node: (u,v) -> scalar probability or state
	# weak references are used as keys so these tables do not keep the tree nodes alive
	rterminals = [weakref.ref(u) for u in gg.get_terminals() ]
	rnonterminals = [weakref.ref(u) for u in gg.get_nonterminals() ]
	
	#psi_i
	pIdentity = dict.fromkeys( rterminals + rnonterminals ) #gg.get_terminals() + gg.get_nonterminals()
	#undergoes discrete change at transmission
	pIdentityPostTransmission = dict.fromkeys(rterminals + rnonterminals) 
	
	# rho_ik
	state = dict.fromkeys(rterminals + rnonterminals ) #
	#undergoes discrete change at transmission
	statePostTransmission = dict.fromkeys(rterminals + rnonterminals)
	
	# each table maps (ancestor node ref) -> (terminal ref) -> value
	for u in pIdentity.keys():
		pIdentity[u] = dict.fromkeys(rterminals) #gg.get_terminals()
		pIdentityPostTransmission[u] = dict.fromkeys(rterminals)
		state[u] =  dict.fromkeys(rterminals)
		statePostTransmission[u] = dict.fromkeys(rterminals)
	
	daf = AncestorFunction(gg) #DeterministicAncestorFunction(gg, fgy) # approximate nlft in each state
	
	
	def drho_dpsi( x, t): 
		#~ state equations for transmission probabilities
		# x packs [rho_0 .. rho_{m-1}, psi]; returns dx/dt for odeint
		dx = zeros(len(x))
		rho = x[:-1]
		psi = x[-1] #= psi
		
		F,G,Y = fgy.get(t, mostRecentSampleHeight = mostRecentSampleHeight )
		A = daf.get(t)
		
		A = minimum(A,Y) # A cannot exceed Y
		fractionYnotA = (Y - A) / Y#[ max((Y[k] - A[k]) / Y[k],0.) for k in range(fgy.m)]
		
		
		#dQ
		'''lambdaPsi = 0.
		R = zeros((fgy.m,fgy.m))
		#~ for k,l in self.nonZeroElements:
		for k in range(fgy.m):
			for l in range(fgy.m):
				if k!=l:
					#~ lambdaS += F[k,l] * (A[k]/Y[k]) * (A[l]/Y[l]) #TODO probably wrong
					R[k,l] += G[l,k] / Y[k] 
					#+ F[l,k] * fractionYnotA[l]/ Y[k] <- this term not included b/c host of line would change
				else:
					lambdaPsi += F[k,l] * fractionYnotA[k] * rho[l] / Y[l]  # conditional on no coalescent occurring
				#
		#
		for k in range(fgy.m):
			R[k,k] = - sum(R[k])
		#'''
		
		# check
		# vectorized form of the commented-out loop above: R is the migration
		# rate matrix with rows summing to zero; lambdaPsi is the transmission hazard
		R = (G / Y).T
		R = R * (1-eye(fgy.m))
		R = R - sum(R, axis = 1)*eye(fgy.m)
		lambdaPsi = sum( (F * fractionYnotA).T * rho / Y )
		#~ pdb.set_trace()
		
		
		dx[:-1] = dot(rho, R)
		dx[-1] = -psi * lambdaPsi
		
		return dx
	#
	
	# integrate rho and psi along the branch from node lv up to its parent v, for the path of terminal u
	def down_state_pIdentity(r_lv, r_v, r_u):
		lv = r_lv(); v = r_v(); u = r_u()
		x0 = hstack( ( statePostTransmission[r_lv][r_u], pIdentityPostTransmission[r_lv][r_u] ))
		t = hstack((arange(lv.height, v.height, daf.deltat), v.height))
		
		x = odeint( drho_dpsi, x0, t)
		
		'''x = [x0]
		for h0, h1 in zip(t[:-1], t[1:]): 
			A0=daf.get(h0)
			A1 = daf.get(h1)
			F0,G0,Y0 = fgy.get(h0, mostRecentSampleHeight = mostRecentSampleHeight)
			F1,G1,Y1 = fgy.get(h1, mostRecentSampleHeight = mostRecentSampleHeight)
			try:
				x1 = likhelper2.solve_p_psi(h0, h1-h0, fgy.minPopSize, fgy.m, len(fgy.nonZeroElements), x0, F0, F1, G0, G1, Y0, Y1, A0,A1, array(fgy.nonZeroElements, dtype=float))
				
			except:
				#~ pdb.set_trace()
				x1 = copy(x0)
			
			x.append(copy(x1))
			x0 = copy(x1)
		#'''
		# normalize rho to a probability vector; clamp psi to be non-negative
		return abs(x[-1][:-1]) / sum(abs(x[-1][:-1])), max(0., x[-1][-1])
	#
	
	# discrete update of rho at the coalescent node v (see the formula in the docstring)
	def down_statePostTransmission(r_lv, r_v, r_u):
		d1 = r_lv(); v = r_v(); u = r_u()
		#~ each at top of incoming branch at node v
		F,G,Y = fgy.get(v.height, mostRecentSampleHeight = mostRecentSampleHeight)
		# d2 is the sibling clade of the one (d1) we arrived from
		if d1==v.clades[0]:
			d2 = v.clades[1]
		elif d1==v.clades[1]:
			d2 = v.clades[0]
		#
		return state[r_v][r_u] * dot(F, d2.state) / dot(state[r_v][r_u], dot(F, d2.state) ) 
		
		#~ return state[r_v][r_u] # 
	#
	
	# discrete update of psi at the coalescent node v: probability the focal
	# lineage (rather than its sibling) was the transmitter at this node
	def down_pIdentityPostTransmission(r_lv, r_v, r_u):
		d1 = r_lv(); v = r_v(); u = r_u()
		#~ each at top of incoming branch at node v
		F,G,Y = fgy.get(v.height, mostRecentSampleHeight = mostRecentSampleHeight)
		if d1==v.clades[0]:
			d2 = v.clades[1]
		elif d1==v.clades[1]:
			d2 = v.clades[0]
		#
		numerator = 0.
		denominator = 0.
		for k in range(fgy.m):
			for l in range(fgy.m):
				try:
					numerator += F[k,l] * ( state[r_v][r_u][k] * d2.state[l] ) / Y[k] / Y[l]
					denominator += F[k,l] * ( state[r_v][r_u][k] * d2.state[l] + state[r_v][r_u][l] * d2.state[k]) / Y[k] / Y[l] #
				except:
					#~ pdb.set_trace()
					continue
		#
		if numerator== 0 and denominator == 0:
			return pIdentity[r_v][r_u]
		elif denominator == 0:
			#~ pdb.set_trace()
			return 0.
		return pIdentity[r_v][r_u] * numerator / denominator
	#
	
	#~ wr = weakrefs = dict([(u, weakref.ref(u)) for u in gg.get_terminals() + gg.get_nonterminals()]) #TODO
	#~ calculate states and pIdentity at each internal node
	#~ iterate over all paths tip -> root
	print time.ctime(), '1st pass over terminals' #psi & rho for all paths (node -> terminal) 
	debugstate = list()
	for r_u in rterminals:#gg.get_terminals():
		u = r_u()
		if gg.noMRCA:
			root2u = gg.get_path(target=u)
		else:
			root2u = [gg.clade] + gg.get_path(target=u)
		u2root = root2u[::-1]
		# initialize at the tip: identity probability 1, state = sampled state
		pIdentity[r_u][r_u] = 1. #psi_u
		pIdentityPostTransmission[r_u][r_u] = 1.
		state[r_u][r_u] = u.state
		statePostTransmission[r_u][r_u] = u.state
		lv = u; r_lv = weakref.ref(lv)
		pastTolerance = False
		# walk from the tip towards the root, propagating rho/psi branch by branch
		for v in u2root[1:]:
			r_v = weakref.ref(v)
			if (not pastTolerance) or lv==u: # speed things up by not calculating with p close to zero
				state[r_v][r_u], pIdentity[r_v][r_u] = down_state_pIdentity(r_lv, r_v, r_u)
				pIdentityPostTransmission[r_v][r_u] = down_pIdentityPostTransmission(r_lv, r_v, r_u )
				statePostTransmission[r_v][r_u] = down_statePostTransmission(r_lv, r_v, r_u)
				if pIdentityPostTransmission[r_v][r_u] < tol:
					pastTolerance = True
				lv = v; r_lv = weakref.ref(lv)
			else:
				# below tolerance: carry the state forward and zero the probabilities
				state[r_v][r_u] = copy(state[r_lv][r_u] )
				statePostTransmission[r_v][r_u] = copy(statePostTransmission[r_lv][r_u] )
				pIdentity[r_v][r_u] = 0. #pIdentityPostTransmission[r_lv][r_u]
				pIdentityPostTransmission[r_v][r_u] = 0. #pIdentityPostTransmission[r_lv][r_u]
				lv = v; r_lv = weakref.ref(lv)
			#
			
			#~ print u.name, state[r_v][r_u]
			debugstate.append((r_v,r_u))
		#
		#~ print time.ctime(), 'done with path states for root ->', u.name
	#
	
	#~ pdb.set_trace()
	#~ sort( [ pIdentity[ weakref.ref(gg.get_parent(x())) ][x] for x in rterminals ] )
	#~ [ sum( state[ weakref.ref(gg.get_parent(x())) ][x]) for x in rterminals ]
	
	# compute Wij matrix
	# second pass: for every ordered pair of tips, combine the path quantities at their MRCA
	#~ W = zeros((gg.count_terminals(), gg.count_terminals()))
	#~ W = dict(zip( gg.get_terminals(), [dict.fromkeys(gg.get_terminals(),0.) for u in gg.get_terminals()]  ))
	tipnames = [rt().name for rt in rterminals]
	W= P = dict(zip( tipnames, [dict.fromkeys(tipnames,0.) for tn in tipnames]  ))
	Ws = Ps = list()
	Cs = list()
	for r_u in rterminals: #gg.get_terminals():
		for r_v in rterminals: #gg.get_terminals():
			u = r_u(); v = r_v()
			if u==v:
				W[u.name][v.name] = 0.
				continue
			mrca = gg.common_ancestor(u,v); r_mrca = weakref.ref(mrca)
			if mrca == gg.root and gg.noMRCA:
				# tips on different trees cannot be linked by transmission
				W[u.name][v.name] = 0.
				continue
			F,G,Y = fgy.get(mrca.height, mostRecentSampleHeight = mostRecentSampleHeight)
			numerator_uv = 0.
			numerator_vu = 0.
			for k in range(fgy.m):
				for l in range(fgy.m):
					try:
						numerator_uv += F[k,l] * (state[r_mrca][r_u][k] * state[r_mrca][r_v][l]) / Y[k] / Y[l]
						numerator_vu += F[k,l] * (state[r_mrca][r_u][l] * state[r_mrca][r_v][k]) / Y[k] / Y[l]
					except:
						#~ pdb.set_trace()
						continue
			#
			denominator_vu = denominator_uv = numerator_uv + numerator_vu
			if numerator_uv == 0:
				W[u.name][v.name] = 0.
			else:
				W[u.name][v.name] = pIdentity[r_mrca][r_u] * pIdentity[r_mrca][r_v] * numerator_uv / denominator_uv
				if isnan(W[u.name][v.name]):
					pdb.set_trace()
			if numerator_vu == 0:
				W[v.name][u.name] = 0. 
			else:
				W[v.name][u.name] = pIdentity[r_mrca][r_u] * pIdentity[r_mrca][r_v] * numerator_vu / denominator_vu
			#
			
			# cophenetic (patristic) distance between the pair, recorded twice (one per direction)
			cophen = mrca.height - u.height + mrca.height - v.height
			Ws.append(W[u.name][v.name])
			Ws.append(W[v.name][u.name])
			Cs+= [ cophen ]*2
			#~ print u.name, v.name, P[r_u][r_v], P[r_v][r_u], numerator_uv, numerator_vu
		#
		print time.ctime(), u.name
	#
	
	if detailed_output:
		return W , Cs, Ws
	return W 
#

def simulate_coalescent(sampleTimes, sampleStates, fgy, tres = 1000, singleMRCA = True):
	"""Fast coalescent simulator conditional on sample times, sample states, and complex demographic history (fgy). 
	
	This coalescent simulator is much faster than the one in colgem1, but also makes additional approximations (distribution of coalescent and migration events described by a single solution of ODEs). These trees will be biased close to the root and when the population size is very small.
	
	sampleTimes -- dict keyed by taxon name, gives time of sampling
	sampleStates -- dict keyed by taxon name, gives length m array
	fgy -- instance of FGY
	
	TODO 
	add option to gracefully recover from invalid coalescent event (incompatible state error)
	
	TODO possible optimizations
	1. AM equations are BIG; m + 2m**2; but 2m**2 can be solved one-by-one given solution to A
	2. shuffle might be slow;
	3. Initializing Clade() n+n-1 times may be slow; can simply construct nwk string
	
	TODO 
	add option to simulate more than one tree (much faster than calling function more than once)
	"""
	print time.ctime(), 'start coal sim'
	
	#~ daf = DeterministicAncestorFunction(sampleTimes, sampleStates, fgy, tres = tres)
	daf = Deterministic_Ancestor_StateTransition_Function(sampleTimes, sampleStates, fgy, tres = tres)
	taxa = sampleTimes.keys()
	mst = max(sampleTimes.values())
	sampleHeights_dict = dict(zip(taxa, [mst - sampleTimes[taxon] for taxon in taxa]))
	t = linspace(0., fgy.mostRecentTime, tres)
	At = [sum(daf.get(h)) for h in t]
	if singleMRCA:
		ncoalescents = len(sampleTimes) - 1
	else:
		ncoalescents = min( round(len(sampleTimes) - At[-1]), len(sampleTimes)-1 ) # if want single MRCA, should be n - 1
	#~ itA = InterpolatedUnivariateSpline( At, t )
	#~ tcoalescents = itA( uniform(size=ncoalescents)  )
	
	COALESCENT = -1
	SAMPLE     = -2
	MIGRATION  = -3
	
	events =  zip( [sampleHeights_dict[taxon] for taxon in taxa], [SAMPLE]*len(taxa), taxa, taxa ) 
	
	fM = daf.get_M(t[-1])
	nM = zeros((fgy.m, fgy.m))
	#~ tM = dict(zip([(k, dict.fromkeys(range(fgy.m)) ) for k in range(fgy.m)]))
	
	#~ coalescent categories
	fC = daf.get_C(t[-1])
	nC = fC.astype(int)
	r = int(max(ncoalescents - sum(nC), 0)); #print r #remainder
	if r >=1:
		from scipy.stats.distributions import rv_discrete
		p = fC / sum(fC)
		p = ravel(p)
		r_i = rv_discrete(name='remainder', values = (arange(fgy.m*fgy.m),p) ).rvs(size=r)
		for ii in r_i:
			l = mod(ii, fgy.m)
			k = int(ii) / int(fgy.m)
			nC[k,l]+=1
	#
	
	
	for k in range(fgy.m):
		for l in range(fgy.m):
			#migrations
			nM[k,l] = int(fM[k,l]) #poisson(fM[k,l])
			if k!=l and fM[k,l] > 0:
				dafgetmkl = array([daf.get_Mkl(tt, k, l) for tt in t]) / fM[k,l];
				idafgetmkl = unique(dafgetmkl, return_index=True)[1]
				try:
					itnMkl = InterpolatedUnivariateSpline(dafgetmkl[idafgetmkl], t[idafgetmkl])
				except:
					itnMkl = interp1d( dafgetmkl[idafgetmkl], t[idafgetmkl])
				if nM[k,l] > 1:
					events.extend( zip( itnMkl( uniform(size=nM[k,l])), [MIGRATION]*nM[k,l], [k]*nM[k,l],[l]*nM[k,l]  ) ) 
				elif nM[k,l] == 1:
					events.extend( zip( [itnMkl( uniform(size=nM[k,l]))], [MIGRATION]*nM[k,l], [k]*nM[k,l],[l]*nM[k,l]  ) )
			#
			#coalescents
			if fC[k,l] > 0:
				dafgetckl = array( [daf.get_Ckl(tt, k, l) for tt in t]) / fC[k,l] 
				idafgetckl = unique(dafgetckl, return_index = True)[1]
				try:
					itnCkl = InterpolatedUnivariateSpline(dafgetckl[idafgetckl], t[idafgetckl])
				except:
					itnCkl = interp1d(dafgetckl[idafgetckl], t[idafgetckl])
					#~ pdb.set_trace()
				if nC[k,l]  > 1:
					events.extend( zip( itnCkl( uniform(size=nC[k,l])), [COALESCENT]*nC[k,l], [k]*nC[k,l],[l]*nC[k,l]  ) )
				elif nC[k,l] == 1:
					events.extend( zip( [itnCkl( uniform(size=nC[k,l]))], [COALESCENT]*nC[k,l], [k]*nC[k,l],[l]*nC[k,l]  ) )
	#
	
	
	
	tevents = [event[0] for event in events]
	ievents = argsort(tevents)
	
	#~ debug variables;
	coalDenied2 = 0
	migDenied = 0
	
	A = list()
	state_clades = dict([(k, list()) for k in range(fgy.m)] )
	for ievent in ievents:
		event = events[ievent]
		h = event[0]
		if event[1] == SAMPLE:
			taxon = event[2]
			istate = argmax(sampleStates[taxon])  # TODO use distribution?
			state = zeros(fgy.m)
			state[istate] = 1.
			u = Phylo.BaseTree.Clade(name = taxon)
			u.state = state
			u.height = h
			state_clades[istate].append(u)
		elif event[1] == COALESCENT:
			k,l = event[2:]
			if k==l and len(state_clades[k]) == 1:
				coalDenied2+=1
				continue
			if len(state_clades[k])==0 or len(state_clades[l])==0:
				coalDenied2+=1
				continue
			#
			iu = randint(0, len(state_clades[k]))
			u = state_clades[k].pop(iu)
			iv = randint(0,len(state_clades[l]))
			v = state_clades[l].pop(iv)
			u.branch_length = h - u.height
			v.branch_length = h - v.height
			alpha = Phylo.BaseTree.Clade(clades=[u,v])
			alpha.state = u.state
			alpha.height = h
			state_clades[k].append(alpha)
		else:#migration
			k,l = event[2:]
			if k==l or len(state_clades[k])==0:
				migDenied += 1
				#~ print 'mig denied', h, k, l
				continue
			#
			iu = randint(0, len(state_clades[k]))
			u = state_clades[k].pop(iu)
			u.state = eye(fgy.m)[l] #
			state_clades[l].append(u)
		#
		
		A.append( [h] + [len(state_clades[i]) for i in range(fgy.m)] )
	#
	clades = [u for sc in state_clades.values() for u in sc]
	trees = [Phylo.BaseTree.Tree(root=u, rooted=True, id=id(u), name=u.name) for u in clades]
	nwks = [tree.__format__('newick') for tree in trees]
	if singleMRCA and len(nwks) > 1:
		numberNewNodes = len(nwks)-1
		h1 = max([u.height for u in clades])
		t_A_interp = InterpolatedUnivariateSpline(t, At)
		haxis = linspace(h1, fgy.mostRecentTime, 100)
		A_at_haxis = t_A_interp( haxis)
		c_h_interp = InterpolatedUnivariateSpline( (max(A_at_haxis) - A_at_haxis) / (max(A_at_haxis) - min(A_at_haxis)) , haxis)
		newCtimes =  c_h_interp(uniform(size=numberNewNodes)) 
		
		for h in sorted(newCtimes):
			iu = randint(0, len(clades))
			u = clades.pop(iu)
			iv = randint(0,len(clades))
			v = clades.pop(iv)
			u.branch_length = h - u.height
			v.branch_length = h - v.height
			alpha = Phylo.BaseTree.Clade(clades=[u,v])
			alpha.state = u.state
			alpha.height = h
			clades.append(alpha)
		#~ h = mst
		#~ for u in clades:
			#~ u.branch_length = h - u.height
		#~ alpha = Phylo.BaseTree.Clade(clades=clades)
		#~ alpha.height = h
		trees = [Phylo.BaseTree.Tree(root=alpha, rooted=True, id=id(alpha), name=alpha.name)]
		nwks = [trees[0].__format__('newick')]
	
	print  len(sampleTimes) - 1, ncoalescents
	print coalDenied2, migDenied
	pdb.set_trace()
	print time.ctime(), 'end coal sim'
	
	
	return trees, nwks, array(A), daf  # state_clades,
#

def load_flatfile_TXmXm( fn):
	"""reads csv flatfile from fn with dimensions (m*T)Xm
		optionally, matrices can be separated by blank lines
	
	fn -- path to the csv file (str)
	Returns a list of T mXm numpy.array(float) matrices.
	"""
	# read all non-empty rows; blank separator lines between matrices are dropped.
	# 'with' closes the handle deterministically (the original leaked it).
	with open(fn, 'r') as f:
		d = [row for row in csv.reader(f) if len(row)>0]
	m = len(d[0]) # matrix dimension, taken from the first row
	# slice the flat row list into T consecutive mXm blocks (// is Py2/Py3 safe)
	return [array(d[i*m:(i*m+m)], dtype=float) for i in range(len(d) // m)]
#

def load_births( fn):
	"""Returns list of mXm numpy.array given filename fn(str).
	
	Thin wrapper around load_flatfile_TXmXm for the birth-rate matrices F.
	"""
	births = load_flatfile_TXmXm(fn)
	return births
#
def load_migrations( fn):
	"""Returns list of mXm numpy.array given filename fn(str).
	
	Thin wrapper around load_flatfile_TXmXm for the migration-rate matrices G.
	"""
	migrations = load_flatfile_TXmXm(fn)
	return migrations
#
def load_taxis(fn):
	"""Returns numpy.array given filename fn(str).
	
	Reads the model time axis from a comma-delimited text file.
	"""
	taxis = numpy.loadtxt(fn, delimiter=',')
	return taxis
#
def load_prevalence( fn):
	"""Returns list of length m numpy.array given filename fn(str). """
	# 'with' closes the file handle deterministically (the original leaked it);
	# blank rows between records are dropped
	with open(fn, 'r') as f:
		d = [row for row in csv.reader(f) if len(row) > 0]
	return array(d, dtype=float)
#


if __name__=='__main__':
	# Command-line entry point: compute the log likelihood of a tree under a
	# complex demographic model (analysis 1 in the module docstring).
	usage = 'usage: $python colgem2.py treeFN sampleTimesFN sampleStatesFN taxisFN birthsFN migrationFN prevalenceFN'
	try:
		treeFN,sampleTimesFN, sampleStatesFN,  taxisFN,birthsFN, migrationFN, prevalenceFN = sys.argv[1:]
	except ValueError: 
		# wrong number of command-line arguments
		print usage
		exit()
	#
	# model inputs: births F, migrations G, prevalence Y, time axis t
	births      = load_births(birthsFN)
	migrations  = load_migrations(migrationFN)
	prevalence  = load_prevalence(prevalenceFN)
	taxis       = load_taxis(taxisFN)
	nwkstring = open(treeFN, 'r').read() # 
	
	# file format should be nX2, (taxon id, value (float))
	sampleTimes = dict( [ ('%s' % row[0], float(row[1])) for row in csv.reader(open(sampleTimesFN, 'r'))  ]  )
	sampleStates = dict( [ ('%s' % row[0], array(row[1:],dtype=float) ) for row in csv.reader(open(sampleStatesFN, 'r'))  ]  ) 
	
	fgy = FGY(taxis, births, migrations, prevalence)
	ggs = GeneGenealogyStates(nwkstring, sampleTimes, sampleStates, fgy, finiteSizeCorrection = True)
	print log_likelihood(ggs)
#


