#!/usr/bin/python
'''
Translates SAM to HAM format.
'''
import sys
import ham
import h5py
import numpy as np
import time
import logging
from processing import Process, Queue

# Command-line parameters: input SAM path, output HAM path, worker count.
sam_file, ham_file = sys.argv[1:3]
num_threads = int(sys.argv[3])

# Read-ahead hint passed to readlines(), in bytes.
# (Other sizes tried during development: 30GB = 32212254720, 10GB = 10737418240.)
buffer = 1 << 30			# 1GB

# Verbose logging, tagging every record with the emitting worker's name.
logging.basicConfig(level=logging.DEBUG, format='[%(levelname)s] (%(threadName)-10s) %(message)s', )

def consume_sam_chunk(ho, cq, hq):
	"""Worker loop: parse SAM line chunks from cq into ham arrays on hq.

	ho is effectively a local copy of the ham_object; no changes will
	affect the copy in the global scope.

	Parameters:
		ho: ham writer object; only its create_array(size) method is used.
		cq: queue of chunks (lists of SAM text lines). The singleton
		    False is the termination sentinel.
		hq: queue receiving the populated ham arrays.
	"""
	# process all chunks until told to stop.
	while True:
		# wait for a chunk (blocks until one is available).
		logging.debug("waiting for sam chunk.")
		lines = cq.get()

		# the producer enqueues False once per worker to shut us down.
		# Identity check: the sentinel is the False singleton, and this
		# avoids accidentally matching an empty chunk.
		if lines is False:
			logging.debug("sam thread terminating.")
			break

		# instantiate a ham array sized for this chunk.
		logging.debug("creating ham chunk.")
		sz = len(lines)
		ham_arr = ho.create_array(sz)

		# parse each line into the matching row of the array.
		logging.debug("processing chunk.")
		for i in range(sz):

			# skip headers.
			# NOTE(review): the row at this index keeps its default
			# values and is still handed downstream — confirm the
			# writer tolerates such placeholder rows.
			if lines[i][0] == "@":
				continue

			# the 11 mandatory SAM fields, in spec order (indices 0-10).
			fields = lines[i].strip().split("\t")
			row = ham_arr[i]
			row['QNAME'] = fields[0]
			row['FLAG'] = fields[1]
			row['RNAME'] = fields[2]
			row['POS'] = fields[3]
			row['MAPQ'] = fields[4]
			row['CIGAR'] = fields[5]
			row['RNEXT'] = fields[6]
			row['PNEXT'] = fields[7]
			row['TLEN'] = fields[8]
			row['SEQ'] = fields[9]
			row['QUAL'] = fields[10]

			# optional tags start at index 11; keep them tab-joined.
			# (was `> 10`, which also matched lines with exactly 11
			# fields and stored a spurious empty OPTIONAL string.)
			if len(fields) > 11:
				row['OPTIONAL'] = '\t'.join(fields[11:])

		# send the populated ham array back.
		hq.put(ham_arr)

		# drop the raw text promptly to release memory.
		del lines
	
# drains parsed ham arrays from a queue and writes them to disk.
def consume_ham_chunk(ho, hq, fq):
	"""Worker loop: pull ham arrays from hq and persist them through ho.

	Parameters:
		ho: ham writer object (save_array, close, active_idx and data
		    are used). Always use this handle, never the module-level
		    ham_obj: in a subprocess the global is a different copy.
		hq: queue of ham arrays; the singleton False terminates the loop.
		fq: queue receiving True once per array written, so the producer
		    can count completed chunks.
	"""
	# process all ham arrays until told to stop.
	while True:
		# wait for ham chunk.
		logging.debug("waiting for ham chunk.")
		ha = hq.get()

		# check for termination.
		# Identity check: `ha == False` on a numpy array is elementwise
		# and makes this `if` raise "truth value is ambiguous".
		if ha is False:
			logging.debug("ham thread terminating.")
			ho.close()
			break

		logging.debug("h5 size before: %i" % ho.active_idx)
		logging.debug("ham_size %i." % ha.size)

		# write ham chunk to disk.
		logging.debug("saving ham chunk.")
		ho.save_array(ha)

		logging.debug("h5 size after: %i" % ho.active_idx)
		logging.debug(ho.data['QNAME'][0])

		# write an indicator it was finished.
		fq.put(True)

# --- main driver: fan SAM chunks out to workers, write results here. ---

# setup simple structures.
logging.debug("Setting up data structures.")
# Writer object for the output HAM file (h5py-backed, per the imports).
ham_obj = ham.HamWrite(ham_file, ham.ham_split_dt)
# NOTE(review): sz, i and j are initialized but never used below —
# presumably leftovers from an earlier serial version.
lines = []
sz = len(lines)
i = 0
j = 0
chunk_cnt = 0
init_time = time.time()

# setup parallel queues.
# sam_queue carries raw line chunks to the workers; ham_queue carries
# the parsed ham arrays back to this process.
sam_queue = Queue()
ham_queue = Queue()
#final_queue = Queue()

# setup a ham writing thread.
# (disabled: the main process now drains ham_queue and writes directly.)
'''
ham_thread = Process(\
				target=consume_ham_chunk,
				name="consume_ham_chunk",\
				args=(ham_obj, ham_queue, final_queue))
ham_thread.start()
'''

# setup sam_consumer threads.
# NOTE(review): each worker receives a (pickled) copy of ham_obj, which
# wraps an open h5py file handle — confirm the ham module supports this;
# the workers only call create_array, all disk writes happen here.
sam_threads = []
for z in range(num_threads):
	worker = Process(\
				target=consume_sam_chunk,
				name="consume_sam_chunk",\
				args=(ham_obj, sam_queue, ham_queue))
	worker.start()
	sam_threads.append(worker)


# read in SAM file.
# NOTE(review): fin is never closed; harmless for a one-shot script but a
# with-block would be cleaner.
fin = open(sam_file, "rb")
while 1 == 1:
	# read appropriate chunk.
	# readlines(buffer) returns whole lines totalling roughly `buffer` bytes.
	logging.debug("reading sam chunk %i." % chunk_cnt)
	lines = fin.readlines(buffer)
	
	# check if we are done.
	if lines == []: break
	
	# enqueue chunk.
	logging.debug("enqueuing sam chunk %i." % chunk_cnt)
	sam_queue.put(lines)
	chunk_cnt += 1
		
# wait for same number of chunks to be in finished queue.
# Chunks can come back in any order; we only count them, so output row
# order follows completion order, not input order.
for z in range(chunk_cnt):
	# wait for a processed ham.
	logging.debug("waiting on %i ham chunks." % (chunk_cnt - z))
	ha = ham_queue.get()
	
	# save to disk.
	logging.debug("saving chunk.")
	ham_obj.save_array(ha)
		
# terminate sam threads.
# One False sentinel per worker, then join them all.
logging.debug("initiating termination of sam threads.")
for t in sam_threads:
	sam_queue.put(False)
for t in sam_threads:
	t.join()
		
# get ham_object.
# (disabled along with the dedicated ham-writing process above.)
'''
logging.debug("terminating ham thread.")
ham_queue.put(False)
ham_thread.join()
'''

# close ham obj.
ham_obj.close()
	
# done.
logging.debug("total time: %f" % (time.time() - init_time))
