#!/usr/bin/env python
"""
Comparing association outputs between rf-ace runs, requires user to indicate results path1, path2, numberOfEdgesToCheck, numberOfFiles
The files will be randomly picked, Use -h for details on not using random and passing in own range or list of files
"""

import sys
import os
import random
import ConfigParser
import optparse
import time
import quantile

__author__ = "Jake Lin"
__copyright__ = "Copyright 2011, The Cogent Project"
__credits__ = ["Jake Lin"]
__license__ = "GPL"
__version__ = "0.1"
__maintainer__ = "Jake Lin"
__email__ = "jakelin@systemsbiology.org"
__status__ = "Dev"

# Shared state between main() and checkAssociations().
dualHash = {}         # "<node1>_<node2>" -> "<rank in run1>_<rank in run2>" for matched edges
predictorHash = {}    # predictor name -> rank among the top edges of run1's file
distinctSources = {}  # unused in this file; presumably reserved for --bysource mode -- TODO confirm
upperBound = 0        # number of comparable association files (smaller of the two directory counts)
sources_array = []    # unused in this file; presumably reserved for --bysource mode -- TODO confirm
summary = None        # open file handle for the summary report, created in main()

def random_generator():
    """Yield random floats in (0, 1), accepting a candidate only when it is
    at least 0.4 away from the previously accepted value; terminates once a
    candidate <= 0.1 is drawn, yielding that final small value.

    Every candidate above the terminator threshold prints '*' (not just
    rejected ones, despite the inline comment below).

    NOTE(review): not referenced anywhere else in this file; uses a
    Python 2-only print statement.
    """
    last, rand = 1, random.random() # initialize candidate elements
    while rand > 0.1:               # threshold terminator
        print '*',                  # display the rejection
        if abs(last-rand) >= 0.4:   # accept the number
            last = rand             # update prior value
            yield rand              # return AT THIS POINT
        rand = random.random()      # new candidate
    yield rand                      # return the final small element

def getDirFilesCount(path):
	"""Return how many entries in directory *path* have ".out" in their name."""
	return sum(1 for entry in os.listdir(path) if ".out" in entry)

def checkAssociations(index, path1, path2, edgeCutoff):
	"""Compare the top edges of associations_<index>.out between two result dirs.

	Pass 1 records the rank of each predictor among the top *edgeCutoff*
	edges of path1's file; pass 2 counts how many of path2's top predictors
	were also in path1's top set.  Match counts plus min/max and quantiles of
	the importance scores (4th tab-separated column) are printed and appended
	to the global summary file.

	index      -- integer suffix of the associations_<index>.out file
	path1/2    -- directories holding the two rf-ace result sets
	edgeCutoff -- number of top-ranked edges to compare
	"""
	global dualHash, summary, predictorHash
	predictorHash = {}
	# Reset the per-file match table as well; without this the "Common edges"
	# section re-lists matches accumulated from every earlier file compared.
	dualHash = {}
	fname = "associations_%i.out" %index 
	if ( not path1.endswith("/")):
		path1 = path1 + "/"
	if ( not path2.endswith("/")):
		path2 = path2 + "/"
	f1name = path1 + fname
	f2name = path2 + fname
	summary.write("\ncomparing " + fname + " in path1 " + path1 + " path2 " + path2 + "\n")
	dual = 0
	edge_iter = 1
	importanceList1 = []
	importanceList2 = []
	# Pass 1: remember each top predictor's rank; collect every importance
	# score.  Expected line layout: n1 n2 pv imp correlation.
	f1reader = open(f1name, 'r')
	try:
		for edge in f1reader:
			tokens = edge.strip().split('\t')
			if (edge_iter <= edgeCutoff):
				predictorHash[tokens[1]] = edge_iter
				edge_iter += 1
			importanceList1.append(float(tokens[3]))
	finally:
		# close even if a malformed line raises
		f1reader.close()
	# Pass 2: count predictors shared with the first run's top set.
	edge_iter = 1
	f2reader = open(f2name, 'r')
	try:
		for edge in f2reader:
			tokens = edge.strip().split('\t')
			if (edge_iter <= edgeCutoff):
				if (predictorHash.get(tokens[1])):
					dual += 1
					dualHash[tokens[0] + "_" + tokens[1]] = str(predictorHash.get(tokens[1])) + "_" + str(edge_iter)
				edge_iter += 1
			importanceList2.append(float(tokens[3]))
	finally:
		f2reader.close()
	# Quantiles of the two importance distributions (the original indented
	# four of these with spaces amid tabs -- a TabError under Python 3).
	impq125 = quantile.quantile(importanceList1,.25)
	impq150 = quantile.quantile(importanceList1,.5)
	impq175 = quantile.quantile(importanceList1,.75)
	impq190 = quantile.quantile(importanceList1,.9)
	impq225 = quantile.quantile(importanceList2,.25)
	impq250 = quantile.quantile(importanceList2,.5)
	impq275 = quantile.quantile(importanceList2,.75)
	impq290 = quantile.quantile(importanceList2,.9)
	print("Matched %i out of %i common edges in associations %s" %(dual, edgeCutoff, fname))
	print("%s min %f max %f %s min %f max %f" %(f1name, min(importanceList1), max(importanceList1), f2name, min(importanceList2), max(importanceList2)))
	# Single-argument print() form is portable across Python 2 and 3
	# (the originals were Python 2-only print statements).
	print("quantile 25 list1 list2 %s %s" % (impq125, impq225))
	print("quantile 50 list1 list2 %s %s" % (impq150, impq250))
	print("quantile 75 list1 list2 %s %s" % (impq175, impq275))
	print("quantile 90 list1 list2 %s %s" % (impq190, impq290))
	summary.write("\nMatched %i out of %i common edges in associations %s" %(dual, edgeCutoff, fname))
	summary.write("\nlist1 min %f max %f list2 min %f max %f" %(min(importanceList1), max(importanceList1), min(importanceList2), max(importanceList2)))
	summary.write("\nquan_25 list1 list2 %f %f" %(impq125, impq225))
	summary.write("\nquan_50 list1 list2 %f %f" %(impq150, impq250))
	summary.write("\nquan_75 list1 list2 %f %f" %(impq175, impq275))
	summary.write("\nquan_90 list1 list2 %f %f" %(impq190, impq290))
	summary.write("\nCommon edges")
	for rec in dualHash:
		summary.write("\n" + rec + " " + dualHash.get(rec))
	return

def main(path1, path2, edgeCutoff, numberOfFiles, randomMode = True):
	"""Drive the comparison of association files between two rf-ace runs.

	path1, path2  -- result directories containing associations_<i>.out files
	edgeCutoff    -- int, number of top edges compared per file
	numberOfFiles -- string: a file count (random mode), a range "a-b",
	                 a comma-separated list "1,5,10", or a single index
	randomMode    -- when True, pick numberOfFiles file indices at random

	Returns "done"; writes a summary report to ./summary<month><day>[...].
	"""
	global upperBound, summary
	count1 = getDirFilesCount(path1)
	count2 = getDirFilesCount(path2)
	print("file counts %i %i" %(count1, count2))
	upperBound = count1
	summaryname = "./summary%i%i" %(time.localtime().tm_mon,time.localtime().tm_mday)
	if (randomMode):
		# BUG FIX: the original appended the literal string "numberOfFiles"
		# instead of the variable's value.
		summaryname = summaryname + "_random" + str(numberOfFiles)
	summary = open(summaryname, 'w')
	summary.write("Comparing %s %s associationIndices %s\n" %(path1, path2, numberOfFiles))
	if (count1 != count2):
		print("Warning: the number association out files are not the same for the two paths")
		if (count2 < count1):
			# compare only indices both directories can supply
			upperBound = count2
	doRange = False
	startRange = 0
	endRange = 0
	if (randomMode):
		endRange = int(numberOfFiles)
		doRange = True
	if (numberOfFiles.count('-') == 1):
		doRange = True
		startRange = int(numberOfFiles.split("-")[0])
		endRange = int(numberOfFiles.split("-")[1]) + 1
	if (doRange):
		for indexRange in range(startRange,endRange):
			if (randomMode):
				# upperBound is a file COUNT; assuming 0-based file suffixes,
				# valid indices are 0..upperBound-1.  The original's inclusive
				# randint(0, upperBound) could pick an index with no file
				# under either 0- or 1-based numbering.  TODO confirm scheme.
				associationIndex = random.randint(0, upperBound - 1)
			else:
				associationIndex = indexRange
			print("comparing associations %i" % associationIndex)
			checkAssociations(associationIndex, path1, path2, edgeCutoff)
	elif (not randomMode and numberOfFiles.count(',') >= 1):
		# BUG FIX: the original required MORE than one comma, silently
		# skipping two-element lists such as "1,5".
		print("processing array of files")
		newlist = map(int, numberOfFiles.split(','))
		for associationIndex in newlist:
			print("comparing associations %i" % associationIndex)
			checkAssociations(associationIndex, path1, path2, edgeCutoff)
	elif (not randomMode and numberOfFiles.isdigit()):
		# A single index such as "7" was previously ignored entirely.
		associationIndex = int(numberOfFiles)
		print("comparing associations %i" % associationIndex)
		checkAssociations(associationIndex, path1, path2, edgeCutoff)
	summary.close()
	return "done"

if __name__=="__main__":
	parser = optparse.OptionParser(description="Program will randomly select association_x.out between rf_ace runs, implying that the feature input must be the same, and then look for similarity edges and their statistics\n\nusage: %prog [options] associationsPath1 associationsPath2 numberOfTopEdges2Check numFiles2Check/filesRange/filesList",version="%prog 1.0")
	parser.add_option('-s', '--bysource', help="To-Do in addition to checking top X edges, also check by source types", dest='bysource_mode', default=False, action='store_true')
	parser.add_option('-n', '--notrandom', help="Instead of randomly checking N association files, user define range or list of associations, ie 1-100 or 1,5,10,99", dest='notrandom_mode', default=False, action='store_true')
	(opts, args) = parser.parse_args()
	if (len(args) == 4):
		print ("Program begins %s" %(time.ctime()))
		main(args[0], args[1], int(args[2]), args[3], not(opts.notrandom_mode))
		print ("Program completed %s" %(time.ctime()))
	else:
		print "Try python comparedRFAssociations.py --help\nRequires: associationsPath1 associationsPath2 numberOfTopEdges2Check numFiles2Check"
		
