__author__ = 'jlin@systemsbiology.org'
import os
import sys
import ConfigParser
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import urllib
import urllib2
import merge_ra_changes
try: import json #python 2.6 included simplejson as json
except ImportError: import simplejson as json

# Register the streaming http handlers with urllib2
register_openers()

# Module-level state shared by the submission helpers below.
sam_hash = {}        # paths to the build's bam/bai files (set in cgs_submit)
files_hash = {}      # LIMS form-field key -> data filename under the build dir
cgs_input_hash = {}  # multipart form fields POSTed to the LIMS controllers
type_hash = {}       # raw classification name -> IGV mutation type (filled by initCGS)
"""
def doMergeSnp(config, ycr, changesfile1, changesfile2):
	if (os.path.exists(changesfile1) and os.path.exists(changesfile2)): 
		tokens = changesfile1.split("/")
		cgskey1 = tokens[8] + "_" + tokens[5] + "_" + tokens[4]
		tokens = changesfile2.split("/")
		cgskey2 = tokens[8] + "_" + tokens[5] + "_" + tokens[4]  
		igvMutationSourcePath = config.get("cgs_lims", "igvMutationSourcePath")
		mergedsnpsfile = merge_ra_changes.doMerge(igvMutationSourcePath, ycr, changesfile1, changesfile2)
		print "mergedSNPsFile %s" % mergedsnpsfile 
		limsControllerUrl = config.get("cgs_lims", "limsMergeSnpUrl")
	else:
		print "WARNING: Either %s or %s does not exists as a file" %(changes1, changes2)
"""
def getAlignedPath(dir):
	"""Return the parent directory of *dir* (everything before the last '/').

	If *dir* contains no '/', it is returned unchanged (rsplit yields a
	single element).
	"""
	# rename of the local avoids shadowing the builtin `list`
	parts = dir.rsplit("/", 1)
	return parts[0]

def renamedBuildPath(dir, aligneddir, key):
	"""Rename the build directory *dir* to <aligneddir>/<key>."""
	new_path = aligneddir + "/" + key
	os.rename(dir, new_path)

def initCGS(config, dir):
	"""Populate the global LIMS form fields and the classification map.

	config -- ConfigParser-like object with a [cgs_lims] section (user/password)
	dir    -- build directory; when it has >= 9 '/'-separated components the
	          seq-run, strain and build-version fields are derived from it,
	          e.g. /solexa/dudley/genome_assembly/<strain>/<seqrun>/unprocessed_reads/alignment_1/<build>

	Side effects: writes into the module globals cgs_input_hash and type_hash.
	"""
	global cgs_input_hash, type_hash
	cgs_input_hash["user"] = config.get("cgs_lims", "user")
	cgs_input_hash["password"] = config.get("cgs_lims", "password")
	# rename of the local avoids shadowing the builtin `list`
	parts = dir.split("/")
	if (len(parts) >= 9):
		cgs_input_hash["YCGSSeqRun"] = parts[5]
		cgs_input_hash["YCGSStrain"] = parts[4]
		cgs_input_hash["YCGSComments"] = "Build version:" + parts[8]
	# map raw classification names to IGV mutation types
	type_hash["synonymous"] = "Synonymous"
	type_hash["nonsynonymous"] = "Missense"
	type_hash["aa_deletion"] = "Indel"
	type_hash["frameshift"] = "Nonsense"
	type_hash["INTERGENIC"] = "Unknown"
	type_hash["aa_insertion"] = "Indel"
	# TODO: account for Target_Region and Splice_site classifications

def doMergeSnp(config, ycr, changesfile1, changesfile2):
	"""Merge the RA-changes files of a cross's two parents and notify LIMS.

	config       -- ConfigParser with a [cgs_lims] section
	ycr          -- cross identifier (sent as the YCR form field)
	changesfile1 -- path to Parent1's ra_changes file
	changesfile2 -- path to Parent2's ra_changes file

	POSTs the merge result (multipart, via poster) to the limsSnpMergeUrl
	controller and prints the status returned by LIMS.
	"""
	global cgs_input_hash
	initCGS(config, "")
	cgs_input_hash["YCR"] = ycr
	# guard clause: both parent files must exist before merging
	if not (os.path.exists(changesfile1) and os.path.exists(changesfile2)):
		print("WARNING: Either %s or %s does not exist as a file" % (changesfile1, changesfile2))
		return
	igvMutationSourcePath = config.get("cgs_lims", "igvMutationSourcePath")
	mergedsnpsfile = merge_ra_changes.doMerge(igvMutationSourcePath, ycr, changesfile1, changesfile2, cgs_input_hash)
	print("mergedSNPsFile %s" % mergedsnpsfile)
	# multipart-encode so any file handles in cgs_input_hash are streamed
	datagen, headers = multipart_encode(cgs_input_hash)
	limsControllerUrl = config.get("cgs_lims", "limsSnpMergeUrl")
	request = urllib2.Request(limsControllerUrl, datagen, headers)
	response = urllib2.urlopen(request)
	try:
		results = json.load(response)
	finally:
		response.close()
	print(' status:' + results['status'])

def populateCoverageSummary(data):
	"""Parse a coverage-summary table and record it in cgs_input_hash.

	data -- iterable of tab-separated lines; the first line is a header.
	        Columns used: [0] chromosome name, [2] percentage covered,
	        [3] coverage depth, [4] chromosome copy number.

	Writes YCGSPercentage / YCGSCoverage as bracketed comma-joined lists and
	flags the build as 'Aneuploid' when any nuclear chromosome (chrM and
	2micron excluded) has a copy number other than 1.
	"""
	global cgs_input_hash
	percentages = []
	coverages = []
	ploidy = ''
	ploidyNotes = ''
	for idx, line in enumerate(data):
		if idx == 0:
			continue  # skip header row
		chrLine = line.split("\t")
		percentages.append(chrLine[2])
		coverages.append(chrLine[3])
		# mitochondrial and 2micron plasmid copy numbers are expected to vary
		if chrLine[0] not in ("chrM", "2micron") and int(chrLine[4]) != 1:
			ploidy = 'Aneuploid'
			ploidyNotes = ploidyNotes + " " + chrLine[0] + " chr_copy " + chrLine[4]
	# join replaces the original build-then-strip-trailing-comma pattern
	cgs_input_hash["YCGSPercentage"] = '[' + ",".join(percentages) + ']'
	cgs_input_hash["YCGSCoverage"] = '[' + ",".join(coverages) + ']'
	cgs_input_hash["ploidy"] = ploidy
	cgs_input_hash["ploidyNotes"] = ploidyNotes

def processClassificationMutations(classificationData, datapath):
	"""Convert RA classification rows into an IGV .mut file.

	classificationData -- iterable of tab-separated classification lines;
	                      columns used: [0-2] chrom/start/end, [8] gene,
	                      [9] classification, [-2]/[-1] trailing fields
	datapath           -- directory where RA_changes_classification.mut is written

	The classification in column 9 is mapped through the global type_hash;
	unmapped classifications fall back to "Unknown" (with a warning on
	stderr, replacing the deprecated sys.exc_type print).
	"""
	global cgs_input_hash, files_hash, type_hash
	classmut = open(datapath + "/RA_changes_classification.mut", "w")
	try:
		for data in classificationData:
			linetk = data.split("\t")
			# avoid shadowing the builtin `type`; dict.get replaces try/except KeyError
			muttype = type_hash.get(linetk[9])
			if muttype is None:
				sys.stderr.write("unknown classification %s, using Unknown\n" % linetk[9])
				muttype = "Unknown"
			classmut.write(linetk[0] + "\t" + linetk[1] + "\t" + linetk[2] + "\t" + cgs_input_hash["YCGSStrain"] + "_" + linetk[9] + "\t" + muttype + "\t" + linetk[8] + "_" + linetk[-2] + "_" + linetk[-1])
	finally:
		# close even if a malformed row raises, so the file is not leaked
		classmut.close()
	#add pf_key to cgs_input_hash
	#cgs_input_hash["YCGSSeqRun"] + "_" + cgs_input_hash["YCGSStrain"] + "_RA_changes_classification"
	#files_hash[cgs_input_hash["YCGSSeqRun"] + "_" + cgs_input_hash["YCGSStrain"] + "_RA_classes"] = "RA_changes_classification.mut"
	
def createMutationSymlink(src, dst, linkname, dstname):
	"""Create a <linkname>.mut symlink in *src* pointing at *dstname* in *dst*.

	src      -- directory where the symlink is created (IGV mutation source path)
	dst      -- directory containing the real mutation file
	linkname -- symlink basename (".mut" is appended)
	dstname  -- filename of the real mutation file inside dst
	"""
	# dropped unused `global sam_hash`; normalized mixed tab/space indentation
	srcname = os.path.join(dst, dstname)
	symname = os.path.join(src, linkname + ".mut")
	print("source_file %s symlink %s" % (srcname, symname))
	os.symlink(srcname, symname)
	
def createBamSymlink(src, dst, linkname):
	"""Symlink the build's bam and bai files (paths taken from the global
	sam_hash) into *src* under the basename *linkname*."""
	bam_link = os.path.join(src, linkname + ".bam")
	bai_link = os.path.join(src, linkname + ".bai")
	print("create bam symlink:" + bam_link)
	os.symlink(sam_hash["cgsbam"], bam_link)
	os.symlink(sam_hash["cgsbai"], bai_link)

def cgs_submit(datapath, symlink_webpath, config):
	"""Submit a completed genome build at *datapath* to the CGS LIMS.

	Collects the build's data files and bam/bai paths, POSTs everything as a
	multipart form to the limsSnpSubmitUrl controller, writes the JSON reply
	to <datapath>/<YCGSKey>.dat, renames the build directory to the returned
	YCGSKey, and creates IGV symlinks for the mutation and bam/bai files.

	datapath        -- build directory (e.g. .../alignment_1/build_NN)
	symlink_webpath -- unused by this function
	config          -- ConfigParser with a [cgs_lims] section
	"""
	global cgs_input_hash, sam_hash, files_hash, type_hash
	alignedPath = getAlignedPath(datapath)
	initCGS(config, datapath)
	# bam/bai live beside the build directory under aligned_reads/
	sam_hash["cgsbam"] = alignedPath + "/aligned_reads/aligned_reads_sorted_IGV.bam"
	sam_hash["cgsbai"] = alignedPath + "/aligned_reads/aligned_reads_sorted_IGV.bam.bai"
	# every file under the build dir becomes a LIMS form field keyed
	# <seqrun>_<strain>_<basename-without-extension>
	for (root, dirs, files) in os.walk(datapath):
		for file in files:
			print "create json for file:" + file
			files_hash[cgs_input_hash["YCGSSeqRun"] + "_" + cgs_input_hash["YCGSStrain"] + "_" + file.split(".")[0]] = file

	#cgs_input_hash["user"] = config.get("cgs_lims", "user")
	#cgs_input_hash["password"]=config.get("cgs_lims", "password")
	cgs_input_hash["YCGSDudleyProject"] ="Dudley Paired End"
	cgs_input_hash["cgsbam"] = sam_hash["cgsbam"]
	cgs_input_hash["cgsbai"] = sam_hash["cgsbai"]
	# attach each data file as an open handle so multipart_encode streams its
	# contents; NOTE(review): the handles are deliberately left open here —
	# poster reads them during the POST below
	for pf_key in files_hash:
		filedata = open(datapath + "/" + files_hash[pf_key])
		cgs_input_hash[pf_key] = filedata
		print "key:" + pf_key
		if (pf_key.find("coverage_summary") > -1):
			populateCoverageSummary(filedata)
		if (pf_key.find("changes_classification") > -1):
			processClassificationMutations(filedata, datapath)
	# register the .mut file written by processClassificationMutations above
	files_hash[cgs_input_hash["YCGSSeqRun"] + "_" + cgs_input_hash["YCGSStrain"] + "_RA_changes_classification"] = "RA_changes_classification.mut"

	# (removed a commented-out example payload that contained hard-coded credentials)

	# headers contains the necessary Content-Type and Content-Length
	# datagen is a generator object that yields the encoded parameters
	datagen, headers = multipart_encode(cgs_input_hash)
	limsControllerUrl = config.get("cgs_lims", "limsSnpSubmitUrl")
	request = urllib2.Request(limsControllerUrl, datagen, headers)
	results = json.load(urllib2.urlopen(request))
	cgsKey = results['YCGSKey']
	print 'cgsKey ' + cgsKey + ' status:' + results['status']
	# persist the LIMS response inside the (about-to-be-renamed) build dir
	f = open(datapath + '/' + cgsKey + '.dat', 'w')
	f.write(json.dumps(results))
	f.close()
	# the build directory becomes <alignedPath>/<cgsKey>
	renamedBuildPath(datapath, alignedPath, cgsKey)
	igvMutationSourcePath = config.get("cgs_lims", "igvMutationSourcePath")
	mutationsFound = 0
	for kv in files_hash:
		if (files_hash[kv].find(".mut") > -1):
			print "create symlinks for key %s mut file %s" % (kv, files_hash[kv])
			createMutationSymlink(igvMutationSourcePath, alignedPath + "/" + cgsKey, cgsKey + "_" + kv, files_hash[kv])
			mutationsFound = 1
	if mutationsFound == 0:
		# NOTE(review): message is missing a space before "does"
		sys.exit("\nError: Build path " + datapath + "does not contain any mutation files\n")

	createBamSymlink(igvMutationSourcePath, alignedPath + "/" + cgsKey, cgsKey + "_" + cgs_input_hash["YCGSSeqRun"] + "_" + cgs_input_hash["YCGSStrain"])

def cgs_strain2barcode(seqkey, dest, config):
	"""Fetch the strain-to-barcode mapping for *seqkey* from LIMS.

	seqkey -- sequencing run key sent to the limsBarcodeUrl controller
	dest   -- directory where <seqkey>_strain2tag.tsv is written
	config -- ConfigParser with a [cgs_lims] section (user/password/limsBarcodeUrl)

	The first output line is "#<flowcell>"; subsequent lines are
	"<strain>\t<tag>" for every response key containing YPG, YO or YAD.
	"""
	print("begin cgs_strain2barcode")
	params = urllib.urlencode({'seqkey': seqkey, 'user': config.get("cgs_lims", "user"), 'password': config.get("cgs_lims", "password")})
	limsControllerUrl = config.get("cgs_lims", "limsBarcodeUrl")
	request = urllib.urlopen(limsControllerUrl, params)
	results = json.load(request)
	strainFile = open(dest + "/" + seqkey + "_strain2tag.tsv", "w")
	try:
		strainFile.write("#" + results["flowcell"] + "\n")
		for res in results:
			# strain keys are recognized by their prefix families
			if (res.find("YPG") > -1 or res.find("YO") > -1 or res.find("YAD") > -1):
				print(res + "\t" + results[res])
				strainFile.write(res + "\t" + results[res] + "\n")
	finally:
		# close even on error so the TSV is flushed (original leaked the handle)
		strainFile.close()


if __name__ == "__main__":
	mode = sys.argv[1]
	config = ConfigParser.RawConfigParser()
	if (mode.startswith("dev")):	
		config.read('./dev_lims.config')
		mode = mode.split("dev_")[1]
	else:
		config.read('./cgs_lims.config')
        if (mode == "strain2bc"):
		print "get barcodes and strains for:" + sys.argv[2]
		cgs_strain2barcode(sys.argv[2], sys.argv[3], config)
	if (mode == "cgssubmit"):
		print "submit cgs mutation findings at path:" + sys.argv[2]
		if (not os.path.exists(sys.argv[2])):
			sys.exit("\nError: Build path " + sys.argv[2] + " does not exist or program does not have appropriate permissions\n")
		cgs_submit(sys.argv[2], "symlink_webpath_tomcat", config)
	if (mode == "mergesnp"):
		if (len(sys.argv) != 5):
			print "Requires YCR Parent1_ra_changes.txt Parent2_ra_changes.txt %s" % "\t".join(sys.argv)
		else:
			doMergeSnp(config,sys.argv[2], sys.argv[3],sys.argv[4]) 
