"""
JAKE ELLOWITZ
Data analysis software for the 2D ising simulation (but can be generalized)
The program assumes nucleation occurs, so T must be less than Tc in the sim.

Final analyzed data is output in the form h, t, sigt, chi, sigchi in the
./cdata/basename.ext

The limits and such are determined from a 128x128 lattice, and should be 
modified according to the size of the lattice used in the simulations.

The raw_stats list is indexed by nucleation time, chi
The raw metastable data is time, m
The final compounded stats list is indexed by t, sigt, chi, sigchi
The index for the points are lists: [method, h, t, sigt, chi, sigchi]
"""

import sys, os, pylab, numpy

class data:
	"""
	Load and pre-process raw 2D-Ising simulation output files.

	Raw files are tab-separated; only columns 0 (h), 1 (tau),
	2 (magnetization m) and 4 (iteration time) are read.
	NOTE(review): column meanings inferred from crop_data's return order --
	confirm against the simulation writer.
	"""

	def __init__(self, data_dir='./data/', beginning_chop=50,
			min_chop_m4=0.965, final_end_chop=100):
		"""
		data_dir        -- directory containing the raw simulation files
		beginning_chop  -- leading samples dropped (equilibration transient)
		min_chop_m4     -- threshold on m^4 used to detect nucleation
		final_end_chop  -- samples stepped back from the detected nucleation
		"""
		self.data_dir = data_dir
		self.beginning_chop = beginning_chop
		self.min_chop_m4 = min_chop_m4
		self.final_end_chop = final_end_chop

	def extract_basename(self, files, basename, h, method='ran'):
		"""
		Return the sub-list of `files` whose names contain basename,
		str(h) and method.
		"""
		return [fname for fname in files
			if basename in fname and str(h) in fname and method in fname]

	def crop_data(self, uncropped_data):
		"""
		Parse raw file text into [[time, m], ...]; grab h and tau too.

		Returns (h, tau, data). h and tau are constant within one run, so
		they are taken from the last parsed line.
		Raises ValueError on empty input (the original raised NameError).
		"""
		parsed = []
		parts = None
		for line in uncropped_data.splitlines():
			parts = line.split('\t')
			parsed.append([int(parts[4]), float(parts[2])])
		if parts is None:
			raise ValueError('crop_data received empty input')
		return float(parts[0]), float(parts[1]), parsed

	def chop_data(self, unchopped_data):
		"""
		Locate the nucleation time -- the first sample with |m^4| below
		the threshold -- step back final_end_chop samples, then drop the
		first beginning_chop samples.

		Returns an empty array when nucleation is never detected (the
		original left end_chop unbound and raised NameError here) or when
		the resulting window would be empty.
		"""
		end_chop = None
		for i, row in enumerate(unchopped_data):
			# Looking at m^4 exaggerates the drop at nucleation.
			if abs(row[1] ** 4) < abs(self.min_chop_m4):
				end_chop = i
				break
		if end_chop is None:
			return numpy.array([])
		if end_chop - self.final_end_chop > self.beginning_chop:
			end_chop -= self.final_end_chop
			return numpy.array(unchopped_data[self.beginning_chop:end_chop])
		return numpy.array([])

	def get_raw_metastable_data(self, basename, h, method):
		"""
		Return a list of chopped data arrays, one per file matching
		basename/h/method. Returns an empty list when no files match:
		falsy, like the original (False, False, False) sentinel, but
		safe to iterate.
		"""
		files = self.extract_basename(
			os.listdir(self.data_dir), basename, h, method)
		if not files:
			return []
		data_head = []
		for fname in files:
			# with-statement closes the file (the original leaked handles)
			with open(self.data_dir + fname, 'r') as f:
				_h, _tau, parsed = self.crop_data(f.read())
			data_head.append(self.chop_data(parsed))
		return data_head

	def just_ms(self, basename, h, method):
		"""
		Extract only the magnetization column from each chopped run,
		skipping runs that were chopped down to nothing.
		"""
		temp = []
		for run in self.get_raw_metastable_data(basename, h, method):
			# .size (not .any()) so an all-zero run is not discarded
			if run.size:
				temp.append(list(run[:, 1]))
		return temp

	
class point(data):
	"""Compound (t, sigt, chi, sigchi) statistics for one (basename, h, method)."""

	def __init__(self):
		"""Initialize with the parent class's default data-location settings."""
		data.__init__(self)

	def get_stats(self, h, tau, data):
		"""
		Return numpy.array([nucleation_time, chi]) for one run, where
		chi = Var(m) / tau. Empty runs yield zeros so they can still be
		compounded.
		"""
		if len(data):
			tf = data[-1, 0]	# time of the last sample before the chop
			chi = numpy.var(data[:, 1]) / tau
			return numpy.array([tf, chi])
		return numpy.zeros(2)

	def compound_stats(self, raw_stats):
		"""Mean and std of nucleation time and chi over all runs."""
		raw_stats = numpy.array(raw_stats)
		return [
			numpy.mean(raw_stats[:, 0]), numpy.std(raw_stats[:, 0]),
			numpy.mean(raw_stats[:, 1]), numpy.std(raw_stats[:, 1]),
		]

	def get_point(self, basename, h, method):
		"""
		Compound statistics over every file matching basename/h/method.
		Returns (t, sigt, chi, sigchi), or (False, False, False, False)
		when the requested file criterion matches nothing.

		Fix: the original referenced an undefined `tau`; tau is now read
		from each file via crop_data, so every run is normalized by its
		own tau.
		"""
		files = self.extract_basename(
			os.listdir(self.data_dir), basename, h, method)
		if not files:
			# Indicate to the caller that the file criterion is invalid.
			return False, False, False, False

		raw_stats = []
		for fname in files:
			with open(self.data_dir + fname, 'r') as f:
				_h, tau, parsed = self.crop_data(f.read())
			raw_stats.append(list(self.get_stats(h, tau, self.chop_data(parsed))))

		stats = self.compound_stats(raw_stats)
		# h is not returned: the caller supplied it.
		return stats[0], stats[1], stats[2], stats[3]

class results:
	"""Collect compounded statistics for every method and field strength."""

	def __init__(self, basename, hmin, hmax, hstep):
		"""
		points[method] is five parallel columns: [h, t, sigt, chi, sigchi].
		Field strengths run over numpy.arange(hmin, hmax, hstep).
		"""
		self.methods = ['ran', 'chk', 'chr']
		self.p = point()
		# Five parallel columns per lattice-update method.
		self.points = {}
		for m in self.methods:
			self.points[m] = [[] for _ in range(5)]
		self.hs = list(numpy.arange(hmin, hmax, hstep))
		self.basename = basename
		self.odata_dir = './cdata/'

	def append_point(self, points, h, t, sigt, chi, sigchi):
		"""Append one compounded data point to the five parallel columns."""
		for column, value in zip(points, (h, t, sigt, chi, sigchi)):
			column.append(value)
		return points

	def make_points_list(self):
		"""Fill the points table, skipping (h, method) pairs with no files."""
		for m in self.methods:
			for h in self.hs:
				t, sigt, chi, sigchi = self.p.get_point(self.basename, h, m)
				# get_point returns literal False on missing files; test
				# identity so legitimate all-zero stats are not discarded
				# (the original truthiness test dropped them).
				if t is False:
					print('Missing file information for run h=' + str(h) + ', m=' + str(m))
				else:
					self.points[m] = self.append_point(
						self.points[m], h, t, sigt, chi, sigchi)

	def points_file_out(self, points):
		"""
		Write one tab-separated file per method into odata_dir.

		Methods with no accumulated points are skipped: the original's
		`if points[m]` was always truthy (a list of 5 lists) and produced
		empty output files.
		"""
		for m in self.methods:
			if points[m] and points[m][0]:
				with open(self.odata_dir + self.basename + '.' + m, 'w') as f:
					# Every column of points[m] has the same length.
					for i in range(len(points[m][0])):
						f.write('\t'.join(
							str(points[m][col][i]) for col in range(5)) + '\n')

class correlation(data):
	"""
	Accumulate the magnetization autocorrelation up to lag tmax.

	self.c[t] is the running sum of m(t')*m(t'+t) over all processed runs
	and self.cnum[t] counts the contributing pairs, so c_mean(t) is the
	sample mean <m(t') m(t'+t)>.
	"""

	def __init__(self, tmax=200):
		"""tmax -- maximum lag (in iteration steps) to accumulate."""
		data.__init__(self)
		self.tmax = tmax
		# Per-lag accumulators; [x]*n is fine for scalars (the original
		# comment warns only against [[]]*n aliasing).
		self.c = [0.] * self.tmax
		self.cnum = [0] * self.tmax

	def c_mean(self, t):
		"""
		Mean lag-t correlation. NOTE(review): raises ZeroDivisionError
		when no run was long enough to contribute a pair at this lag --
		confirm tmax is shorter than the shortest chopped run.
		"""
		return self.c[t] / self.cnum[t]

	def corr(self, data):
		"""Fold one run's lagged products into the class-global sums."""
		tf = len(data)
		for t in range(self.tmax):
			pairs = tf - t
			# Runs shorter than the lag contribute nothing.
			if pairs > 0:
				for tp in range(pairs):
					self.c[t] += data[tp] * data[tp + t]
				# Count all pairs at once instead of one increment each.
				self.cnum[t] += pairs

	def plot_corr(self, c):
		"""Display the normalized autocorrelation curve."""
		pylab.plot(c)
		pylab.grid()
		pylab.show()

	def ctrl(self, basename='trial', h=0.41, method='chk'):
		"""
		Driver: load all runs for (basename, h, method), accumulate the
		lagged sums, normalize to C(t) = (<m m_t> - <m>^2) / Var(m) and
		plot the result.
		"""
		data_head = self.just_ms(basename, h, method)

		# Pool every run for the global mean and variance.
		data_line = []
		for run in data_head:
			data_line += run
		mav = numpy.mean(data_line)
		mvar = numpy.var(data_line)

		for i, run in enumerate(data_head):
			self.corr(run)	# updates the class-global accumulators
			print('data ' + str(i) + ' is done.')

		c_av = [0.] * self.tmax
		for t in range(self.tmax):
			ctemp = self.c_mean(t)
			print(ctemp)
			c_av[t] = (ctemp - mav ** 2) / mvar
		self.plot_corr(c_av)
	
