from __future__ import with_statement # 2.5 only
import threading
import pickle
import subprocess
import os
import mig.core
import uuid
import tempfile
import time
import datetime
import shutil
import xml.dom.minidom
import locale
import sys

# ====================
# = Global variables =
# ====================

# This lock protects the copying of CSPBuilder to MiG
CSPBUILDER_LOCK = threading.Lock()
# Serializes creation of and writes to the statistics log file (see print_logs).
LOG_LOCK = threading.Lock()

# Tells us if CSPBuilder has been copied to MiG.
CSPBUILDER_COPIED = False
# Do we emit debug messages
PRINT_DEBUG = False
# Do we log MiG statistics
LOG_STATS = True
# Do we compress input and output for the channels
COMPRESS = False

# Template for the generated .csp application file.  Migclient.run_job
# substitutes the __PROCESS__ / __PROCESS_NAME__ / __PROCESS_LOCATION__ and
# __CHANNEL_IN__ / __CHANNEL_OUT__ placeholders, wiring the user's process
# between the MiG_Wrapper's out0/in0 connections.
XML_SKELETON = """<?xml version="1.0" encoding="utf-8"?>
<doc>
  <process name="untitled" package="default">
	<channel name="chan0" type="unknown">
	  <connection direction="out" target="MiG_Wrapper.out0"/>
	  <connection direction="in" target="__CHANNEL_IN__"/>
	</channel>
	<channel name="chan1" type="unknown">
	  <connection direction="out" target="__CHANNEL_OUT__"/>
	  <connection direction="in" target="MiG_Wrapper.in0"/>
	</channel>
	<processlink file="Components/MiG_Wrapper.csp" name="MiG_Wrapper" process="MiG_Wrapper"/>
	<processlink file="__PROCESS_LOCATION__" name="__PROCESS_NAME__" process="__PROCESS__"/>
  </process>
</doc>
"""

# Shell script executed on the MiG resource: unpack CSPBuilder, install the
# generated .csp app and run it.  $CSP2MIG_* are set via job.add_env_var in
# Migclient.run_job.
# NOTE: the triple-M spelling "COMMMAND" is kept as-is deliberately; the name
# is referenced elsewhere in this module.
COMMMAND = """tar -zxf CSPBuilder.tar.gz
cp $CSP2MIG_WORKING_FOLDER/$CSP2MIG_APP_NAME Applications
python execute.py -dl Applications/$CSP2MIG_APP_NAME"""

# Remote file name of the uploaded CSPBuilder archive.
CSPBUILDER_ARCHIVE = 'CSPBuilder.tar.gz'
# VGrid used when the job context supplies none.
DEFAULT_VGRID = 'Generic'

# State and name of the local statistics log written by print_logs.
LOG_FILE_CREATED = False
LOG_FILE_NAME = 'csp2mig.log'

# strptime format used to parse the received/queued/executing/finished
# timestamps coming back from MiG (see print_logs).
MIG_DATE_FORMAT = '%a %b %d %H:%M:%S %Y'
def consider_copying_cspbuilder(local_tempdir):
	"""Copy CSPBuilder to MiG, if this hasn't been done yet in this session.

	Builds a gzipped tar archive of the current working directory inside
	*local_tempdir* and uploads it via mig.core.migput.  Guarded by
	CSPBUILDER_LOCK so concurrent jobs trigger at most one upload.
	"""
	global CSPBUILDER_COPIED
	with CSPBUILDER_LOCK:
		if CSPBUILDER_COPIED:
			# we do this maximum once per session
			return
		if PRINT_DEBUG:
			print('Copying CSPBuilder to MiG')
		archive_path = os.path.join(local_tempdir, CSPBUILDER_ARCHIVE)
		# Argument-list form avoids shell quoting problems if the tempdir
		# path contains spaces or other shell metacharacters.
		archive_proc = subprocess.Popen(['tar', '-z', '-cf', archive_path, '.'],
										stdout=subprocess.PIPE)
		archive_proc.communicate()
		mig.core.migput(archive_path, CSPBUILDER_ARCHIVE)
		CSPBUILDER_COPIED = True


def print_logs(logs):
	"""Append one tab-separated statistics row per entry in *logs*.

	Each log is a dict that may contain local/MiG ids, timing figures and
	the four MiG status timestamps (received/queued/executing/finished,
	formatted per MIG_DATE_FORMAT).  Missing fields are written as -1
	(ids/times) or 'missing' (timestamps).  The first call of the session
	creates the file with a header row; later calls append.
	Thread-safe via LOG_LOCK.
	"""
	global LOG_FILE_CREATED
	with LOG_LOCK:
		if not LOG_FILE_CREATED:
			# create new file and write the column header
			f = open(LOG_FILE_NAME, 'w')
			f.write('local_id\tmig_id\tlocal_total_time\tcsp_process_time\treceived\tqueued\texecuting\tfinished\tmig_wait_time\tmig_queue_time\tmig_exec_time\tmig_total_time\n')
			LOG_FILE_CREATED = True
		else:
			# append
			f = open(LOG_FILE_NAME, 'a')

		def _write_raw(log, key):
			# String field written verbatim, -1 when absent.
			if key in log:
				f.write(log[key] + '\t')
			else:
				f.write(str(-1) + '\t')

		def _write_num(log, key):
			# Numeric field stringified, -1 when absent.
			if key in log:
				f.write(str(log[key]) + '\t')
			else:
				f.write(str(-1) + '\t')

		def _write_timestamp(log, key):
			# Timestamp field: write it and return the parsed datetime,
			# or write 'missing' and return None.
			if key in log:
				f.write(log[key] + '\t')
				return datetime.datetime.strptime(log[key], MIG_DATE_FORMAT)
			f.write('missing\t')
			return None

		# Save exactly the LC_TIME category, since that is what we override
		# below.  (Previously the default getlocale() category was saved but
		# LC_ALL was restored, which could clobber unrelated categories.)
		saved_time_locale = locale.getlocale(locale.LC_TIME)
		# use default US locale, to be sure we can parse the MiG timestamps;
		# may raise locale.Error on systems without an en_US locale.
		locale.setlocale(locale.LC_TIME, 'en_US')
		try:
			for log in logs:
				_write_raw(log, 'local_id')
				_write_raw(log, 'mig_id')
				_write_num(log, 'total_time_secs')
				_write_num(log, 'mig_time_secs')

				recv_time = _write_timestamp(log, 'received')
				queue_time = _write_timestamp(log, 'queued')
				exec_time = _write_timestamp(log, 'executing')
				finish_time = _write_timestamp(log, 'finished')

				if recv_time != None and queue_time != None and exec_time != None and finish_time != None:
					mig_wait_time = queue_time - recv_time
					mig_queue_time = exec_time - queue_time
					mig_exec_time = finish_time - exec_time
					mig_total_time = finish_time - recv_time
					# NOTE: .seconds ignores the .days component of a
					# timedelta; jobs spanning >24h under-report here.
					f.write(str(mig_wait_time.seconds) + '\t')
					f.write(str(mig_queue_time.seconds) + '\t')
					f.write(str(mig_exec_time.seconds) + '\t')
					f.write(str(mig_total_time.seconds) + '\t')
				else:
					f.write('missing\tmissing\tmissing\tmissing\t')

				# Terminate every row (previously only the final row was
				# terminated, corrupting the file across multiple calls).
				f.write('\n')
		finally:
			f.close()
			locale.setlocale(locale.LC_TIME, saved_time_locale)  # restore saved locale
class Migclient:
	"""Runs one CSPBuilder process as a MiG grid job and fetches its result.

	NOTE(review): run_job stores per-job state (working_folder) on the
	instance, so one Migclient instance appears intended per concurrent
	job -- confirm with callers before sharing instances across threads.
	"""
	def __init__(self):
		# Remote folders already created this session, so migmkdir is not
		# called twice for the same path (see create_path_leading_to_file).
		self.created_folders = []
		# Per-job statistics, populated by run_job when LOG_STATS is set.
		self.log = {}

	def create_path_leading_to_file(self, relative_file_path):
		"""Create, on MiG, every directory leading up to relative_file_path
		(a '/'-separated path) below self.working_folder, skipping folders
		already created by this instance."""
		parts = relative_file_path.split('/')
		path = self.working_folder
		for i in range(len(parts) - 1):
			path += '/' + parts[i]
			# check if the path is already created
			if not path in self.created_folders:
				self.created_folders.append(path)
				mig.core.migmkdir(path)
		

	def run_job(self,mig_input, local_tempdir, context):
		"""Submit *mig_input* to the process described by *context* as a MiG
		job, wait for completion and return (output, success).

		mig_input may be wrapped in a Taggedvalue, in which case the result
		is wrapped with the same tag.  context is a dict carrying the
		process/channel names, file lists and optional resource settings
		(cpu_time, cpu_count, node_count, memory, disk, arch).
		Returns (None, False) when the job fails or its result cannot be
		loaded.  The remote working folder is always removed on exit.
		"""
		if LOG_STATS:
			start_time = time.time()
		# load context vars
		process = context['process']
		name = context['process_name']
		chanin = context['in_channel']
		chanout = context['out_channel']
		location = context['process_location']
		processlink_child_nodes = context['send_to_grid_child_nodes']
		vgrid = context['vgrid']
		if vgrid == None:
			vgrid = DEFAULT_VGRID
		input_files = context['input_files']
		output_files = context['output_files']
		
		# create unique job ids
		# NOTE(review): threading._get_ident is a private API (spelled
		# get_ident in later Pythons); presumably appended to keep ids from
		# different threads distinct -- confirm before upgrading Python.
		job_uid = uuid.uuid4().hex + str(threading._get_ident())
		self.working_folder = job_uid
		input_file = job_uid + '.in'
		output_file = job_uid + '.out'
		app_name = job_uid + '.csp'

		# check for tag
		if isinstance(mig_input,Taggedvalue):
			value = mig_input.value
			uses_tag = True
		else:
			value = mig_input
			uses_tag = False
	
		# dump input to file (pickled, optionally compressed)
		if COMPRESS:
			if PRINT_DEBUG:
				print 'sending compressed output'
			mig.core.pickle_compressed(local_tempdir + '/' + input_file, value)
		else:
			f = open(local_tempdir + '/' + input_file,'w')
			pickle.dump(value, f)
			f.close()
	
		# create .csp-file by filling in the XML_SKELETON placeholders
		csp_xml = XML_SKELETON.replace('__PROCESS__',process,1)
		csp_xml = csp_xml.replace('__PROCESS_NAME__',name,1)
		csp_xml = csp_xml.replace('__PROCESS_LOCATION__',location,1)
		csp_xml = csp_xml.replace('__CHANNEL_IN__',chanin,1)
		csp_xml = csp_xml.replace('__CHANNEL_OUT__',chanout,1)
		
		# insert any childnodes of the original .csp file into the matching
		# processlink element.
		# todo this is a bit hacky!
		if processlink_child_nodes != []:
			dom = xml.dom.minidom.parseString(csp_xml)
			for node in dom.getElementsByTagName('processlink'):
				if node.attributes['name'].value == name:
					for child in processlink_child_nodes:
						newNode = dom.importNode(child,True)
						node.appendChild(newNode)
			csp_xml = dom.toxml()
		
		f = open(local_tempdir + '/' + app_name,'w')
		f.write(csp_xml)
		f.close()
		
		

		# create job
		job = mig.core.Migjob(COMMMAND,vgrid, job_uid, local_tempdir)
		
		# add the runtime environment
		job.add_runtime_env('PYTHON-2.5')
		
		# add input files
		job.add_input_file(self.working_folder + '/' + input_file)
		job.add_input_file(self.working_folder + '/' + app_name)
		job.add_input_file(CSPBUILDER_ARCHIVE)
			
		for local, remote in input_files:
			# if the local filename is empty, this means that the file is present on the MiG server
			# and it cannot be located in the working folder.
			if local.strip() != '':
				job.add_input_file(self.working_folder + '/' + remote)
			else:
				job.add_input_file(remote)
	
		# add output files
		job.add_output_file(self.working_folder + '/' + output_file)
	
		for local, remote in output_files:
			job.add_output_file(self.working_folder + '/' + remote)
	
		# add additional settings -- every key is optional in the context
		
		if context.has_key('cpu_time'):
			job.set_cpu_time(context['cpu_time'])
			
		if context.has_key('cpu_count'):
			job.set_cpu_count(context['cpu_count'])
			
		if context.has_key('node_count'):
			job.set_node_count(context['node_count'])
			
		if context.has_key('memory'):
			job.set_memory(context['memory'])

		if context.has_key('disk'):
			job.set_disk(context['disk'])

		if context.has_key('arch'):
			job.set_architecture(context['arch'])
			
	
		# add environment variables read by the COMMMAND job script on the
		# resource (and, presumably, by execute.py -- TODO confirm)
		job.add_env_var('CSP2MIG_WORKING_FOLDER',self.working_folder)
		job.add_env_var('CSP2MIG_INPUT',input_file)
		job.add_env_var('CSP2MIG_OUTPUT',output_file)
		# NOTE: job_uid + '.csp' is the same value as app_name above
		job.add_env_var('CSP2MIG_APP_NAME',job_uid + '.csp')
		job.add_env_var('CSP2MIG_COMPRESS',str(COMPRESS))
					
		job_submitted = False
		try:
			# create working folder
			mig.core.migmkdir(self.working_folder)
			# copy input file and app to MiG
			mig.core.migput(local_tempdir + '/' + input_file, self.working_folder +'/' + input_file)
			mig.core.migput(local_tempdir + '/' + app_name,self.working_folder+'/'+app_name)
		
			# copy  the users input files
			for local, remote in input_files:
				# if the local filename is empty, this means that the file is present on the MiG server
				if local.strip() != '':
					# create the path to the file if needed
					self.create_path_leading_to_file(remote)
					mig.core.migput(local, self.working_folder + '/' + remote)
			
			
			job.submit()
			job_submitted = True
			# wait for job to finish - poll every 5 seconds
			if job.wait(5):
				# get output files to local storage
				# migget is at bit strange. It preserves the original filename from MiG.
				# We need to create a directory for dumping output files				
				outdir = local_tempdir + '/' + job_uid
				os.mkdir(outdir)			
				for local, remote in output_files:
					mig.core.migget(self.working_folder + '/' + remote, outdir)
					path_parts = remote.split('/')
					file_name = path_parts[len(path_parts)-1]
					# finally copy the file. That was a lot of work....
					shutil.copyfile(outdir + '/' + file_name, local)
				# get result file
				mig.core.migget(self.working_folder + '/' + output_file, local_tempdir)
				
				try:
					if COMPRESS:
						(mig_output, error), MiG_time = mig.core.unpickle_compressed(local_tempdir + '/' + output_file)
					else:					
						result_file = open(local_tempdir + '/' + output_file, 'r')
						(mig_output, error), MiG_time = pickle.load(result_file)
						result_file.close()
				# NOTE: deliberately broad -- any failure to unpickle the
				# result is reported as job failure rather than crashing.
				except:
					mig.core.print_debug('Loading result failed: ' + str(sys.exc_info()),job)
					return (None, False)
					
					
				success = (error == None)
				
				if not success:
					mig.core.print_debug('An error occurred while executing on MiG-resource: ' + error, job)

				if LOG_STATS:
					total_time = time.time() - start_time
					# inherit and expand the jobs log.
					self.log = job.log
					self.log['total_time_secs'] = total_time
					self.log['mig_time_secs'] = MiG_time
					mig.core.print_debug('Total time: ' + str(total_time),job)
					mig.core.print_debug('Execution time on MiG: ' + str(MiG_time),job)
			
				# re-wrap the result with the caller's tag, if one was given
				if uses_tag:
					return (Taggedvalue(mig_output, mig_input.tag), success)
				else:
					return (mig_output, success)
				
			else:
				mig.core.print_debug('Job did not finish',job)
				return (None, False)
		finally:
			# clean up on MiG (always), and locally only if submit succeeded
			mig.core.migrm(self.working_folder, recursive=True)					
			if job_submitted:
				job.cleanup()

# ==========================================================================
# = This class allows us to tag a value, for in-order delivery of results. =
# ==========================================================================
class Taggedvalue:
	"""Pairs a payload with an identifying tag.

	Used to label a value sent to the grid so that its result can be
	matched back up and delivered in order.
	"""

	def __init__(self, value, tag):
		# Keep both exactly as given; no copying or validation.
		self.tag = tag
		self.value = value
		
