#!/usr/bin/python
'''
Performs hairpin search in parallel.
'''
import os
import sys
import logging
import subprocess
from string import Template
from processing import Process, Queue

# parameters.
utr_dir = os.path.abspath(sys.argv[1])      # directory of *_utr.fa files
rna_dir = os.path.abspath(sys.argv[2])      # directory of *_microRNAs.fa files
result_dir = os.path.abspath(sys.argv[3])   # pita output prefix directory
working_dir = os.path.abspath(sys.argv[4])  # per-pair scratch directories
script_dir = os.path.abspath(sys.argv[5])   # generated wrapper scripts
out_file = sys.argv[6]                      # log file
error_file = sys.argv[7]                    # error log file (currently unused)
num_threads = 30

# clear dirs.
# BUGFIX: the old subprocess.call(["rm", "-rf", "dir/*"]) ran rm without a
# shell, so the "*" glob was never expanded and nothing was deleted.  Expand
# the glob in python and remove each entry explicitly.
def _clear_dir(d):
	# best-effort removal of every entry directly under d.
	for path in glob.glob("%s/*" % d):
		if os.path.isdir(path):
			shutil.rmtree(path, ignore_errors=True)
		else:
			try:
				os.remove(path)
			except OSError:
				pass

for _d in (result_dir, working_dir, script_dir):
	_clear_dir(_d)

# remove stale log files if present (plain "rm" warned when they were absent).
for _f in (out_file, error_file):
	try:
		os.remove(_f)
	except OSError:
		pass

logging.basicConfig(filename=out_file, level=logging.DEBUG, format='[%(levelname)s] (%(threadName)-10s) %(message)s', )

# worker: consume pairs from the queue and run pita on each.
def process_pair(pair_queue, result_queue, working_dir, out_dir, script_dir):
	'''
	Worker loop: pull (name, utr_file, mir_file) triples off pair_queue,
	run pita on each via a generated wrapper script, and put one token on
	result_queue per pair so the parent can count completions.  Never
	returns; the parent kills the process with terminate().
	'''
	while True:
		# block until a pair is available.
		logging.debug("waiting for pair.")
		pair = pair_queue.get()
		
		# search for loops.
		logging.debug("executing pita on %s, %s" % (pair[0], pair[1]))
		
		# build the pita command line.
		# NOTE(review): joining with spaces breaks on paths containing
		# whitespace or shell metacharacters -- confirm inputs are safe.
		prefix = "%s/%s" % (out_dir, pair[0])
		tmp_dir = "%s/%s" % (working_dir, pair[0])
		cmd = ' '.join([
			"/home/jlindsay/pita/pita_prediction.pl",
			"-utr", pair[1],
			"-mir", pair[2],
			"-prefix", prefix,
			])
		
		# render the wrapper script: make a scratch dir, run pita inside
		# it, then remove the scratch dir.
		txt = Template("""
#!/bin/bash
# create working dir.
mkdir $tmp_dir

# move to working dir.
cd $tmp_dir

# execute pita.
$cmd

# delete working dir.
rm ${tmp_dir}/*
rmdir ${tmp_dir}
exit;
""").substitute(tmp_dir=tmp_dir, cmd=cmd)
		
		# write the script; 'with' guarantees the handle is closed even if
		# the write fails (the old code leaked it on error).
		script_file = "%s/%s.sh" % (script_dir, pair[0])
		with open(script_file, "wb") as fout:
			fout.write(txt)
		
		# submit the script through the batch wrapper.
		retcode = subprocess.call(["/home/jlindsay/batch_code/python_sub.sh", script_file, "/dev/null", "/dev/null"])
		
		# on failure, log it but still push a token so the parent's
		# completion count stays correct.
		result_file = "%s_pita_results.tab" % prefix
		if retcode != 0:
			logging.error("error\t%s\t%s" % ("retcode", script_file))
			result_queue.put("")
			continue
			
		if not os.path.isfile(result_file):
			logging.error("error\t%s\t%s" % ("file", script_file))
			result_queue.put("")
			continue
		
		# result-file parsing is disabled; just signal completion.
		result_queue.put("")

# pair each UTR fasta with its microRNA fasta, keyed by the shared prefix.
# each value is [key, utr_path, rna_path].
pairs = {}
for fname in os.listdir(utr_dir):
	key = fname.replace("_utr.fa", "")
	pairs[key] = [key, "%s/%s" % (utr_dir, fname), ""]

# fill in the microRNA path for each key; an unmatched microRNA file
# raises KeyError, which aborts the run early (fail fast).
for fname in os.listdir(rna_dir):
	key = fname.replace("_microRNAs.fa", "")
	pairs[key][2] = "%s/%s" % (rna_dir, fname)
	
	
# NOTE: dead code -- the triple-quoted string below is a no-op expression
# kept as a record of a one-off run that split the pair files across 80
# subdirectories for parallel execution.  It is never executed.
'''
## Divide up files for parallel execution.
# make dirs.
z = 80
base = "/data2/projects/tammar_analysis/gene_regulation/top100_inside/pdir"
udirs = []
rdirs = []
for i in range(z):
	name = "%s/%i" % (base, i)
	subprocess.call(["mkdir", "%s" % name])
	subprocess.call(["mkdir", "%s/rna" % name])
	subprocess.call(["mkdir", "%s/utr" % name])
	
	rdirs.append("%s/rna" % name)
	udirs.append("%s/utr" % name)

tmp = pairs.keys()
tmp.reverse()
i = 0
j = 0
for x in tmp:
	# copy files to each.
	pair = pairs[x]
	utrb = os.path.basename(pair[1])
	rnab = os.path.basename(pair[2])
	y = "%s/%s" % (udirs[i], utrb)
	e = "%s/%s" % (rdirs[i], rnab)
	
	subprocess.call(["cp", pair[1], y])
	subprocess.call(["cp", pair[2], e])
	
	j+= 1
	if j % 1000 == 0:
		print "%i of %i" % (j, len(tmp))
		
	i += 1
	if i % z == 0:
		i = 0
	
sys.exit()	
'''


# Enqueue every pair for the workers, validating the files first.
pair_queue = Queue()
for p in pairs:
	# abort if either fasta file is missing.
	if not (os.path.isfile(pairs[p][1]) and os.path.isfile(pairs[p][2])):
		print("bad pair")
		print(pairs[p])
		# BUGFIX: was sys.exit(), which reported success (status 0)
		# to the caller even though the run aborted.
		sys.exit(1)
	
	# add to queue.
	pair_queue.put(pairs[p])
	
# Start the worker pool; each worker loops forever over pair_queue.
result_queue = Queue()
workers = []
for i in range(num_threads):
	worker = Process(
		target=process_pair,
		name="consumer",
		args=(pair_queue, result_queue, working_dir, result_dir, script_dir)
		)
	worker.start()
	workers.append(worker)

# Drain exactly one token per pair so we know every pair was processed.
# (Workers always put "" -- result parsing is disabled -- so the values
# themselves are discarded.)
for i in range(len(pairs)):
	result_queue.get()
	
# Workers never exit on their own; kill them once all pairs are done.
for worker in workers:
	worker.terminate()
	
