'''

Created on 25.5.2011

@author: Martin Vegi Kysel
@summary: the main application for batch processing of Go/NoGo log files
'''

# add the path of the project to the pythonPath, solves bugs with relative importing
import sys,os
# directory the script was started from, and its parent (assumed project root)
pathname = os.path.dirname(sys.argv[0])
parentPath= os.path.abspath(os.path.join(pathname,'..'))
print 'started in: ',parentPath 
sys.path.append(parentPath)

from PyStats.MeanStandardizedResponseRateFunction import computeMeanStandardizedResponseRate
from PyStats.MannWhitneyFunction import MannWhitney
from PyStats.MeanStandardizedResponseRateFunction import NormalizeResponseRate
from FileReaders.PolTypeGnGLogFileReader import extractItems
from Utils.StringProcessor import extractNonDigitPrefix
import Tkinter
from tkFileDialog import askdirectory

#global variables - feel free to change these!

# separator sign used between columns in the file output
# alternatives: " ", "\t"
SEPARATOR_SIGN = " "

# session size, defaults to 40: the number of trials that make up one session
SESSION_SIZE=40

# the first seconds of a trial to be taken into account, defaults to 10, is inclusive (1-10)
SELECTED_SECONDS = 10


def createProcessedFile(fileOutputDirectory,infilename,extracted_ImageItems):
    """
    Write the extracted image items to "Processed_<infilename>" in
    fileOutputDirectory: a header row, then one SEPARATOR_SIGN-separated
    line per item (session, trial, class, image name, peck count).
    """
    # BUG FIX: the original ended with "f.close" (missing parentheses), so
    # the file was never explicitly closed; "with" guarantees closing even
    # when a write raises.
    with open(fileOutputDirectory+"Processed_"+infilename,'w') as f:
        f.write("Session"+SEPARATOR_SIGN+"Trial"+SEPARATOR_SIGN+"Class"+SEPARATOR_SIGN+"Image"+SEPARATOR_SIGN+"Pecks\n")
        for item in extracted_ImageItems:
            fields = [item.session, item.trial, item.itemClass, item.image_name, item.numPecks]
            f.write(SEPARATOR_SIGN.join(str(v) for v in fields)+"\n")

def createNormalizedFile(fileOutputDirectory,infilename,extracted_ImageItems):
    """
    Write image items to "Normalized_<infilename>" in fileOutputDirectory:
    a header row, then one SEPARATOR_SIGN-separated line per item
    (session, trial, class, image name, peck count).

    NOTE(review): the caller is expected to pass items already normalized
    by NormalizeResponseRate; this function writes item.numPecks as-is
    under the "Normalized_Pecks" header.
    """
    normalizedName = fileOutputDirectory+"Normalized_"+infilename

    # BUG FIX: the original ended with "f2.close" (missing parentheses), so
    # the file was never explicitly closed; "with" guarantees closing.
    with open(normalizedName,'w') as f2:
        f2.write("Session"+SEPARATOR_SIGN+"Trial"+SEPARATOR_SIGN+"Class"+SEPARATOR_SIGN+"Image"+SEPARATOR_SIGN+"Normalized_Pecks\n")
        for item in extracted_ImageItems:
            fields = [item.session, item.trial, item.itemClass, item.image_name, item.numPecks]
            f2.write(SEPARATOR_SIGN.join(str(v) for v in fields)+"\n")

def _writeSessionStatistics(outfile, session, x, y):
    # Write one statistics line for a finished session: Mann-Whitney results
    # (P, z, Rho, U) for class-1 (x) vs class-2 (y) peck counts, or -1
    # placeholders when either class has no data for the session.
    currentArray = [-1,-1,-1,-1]
    if len(x)>0 and len(y)>0:
        currentArray = MannWhitney(x, y)
    outfile.write(str(session) + SEPARATOR_SIGN
                  + SEPARATOR_SIGN.join(str(v) for v in currentArray) + "\n")

def createTrainingStatisticsFile(fileOutputDirectory, infilename, normalized_ImageItems):
    """
    Write per-session Mann-Whitney statistics (P, z, Rho, U), comparing the
    peck counts of class-1 items against class-2 items, to
    "Training_Statistics_<infilename>" in fileOutputDirectory.

    Assumes normalized_ImageItems is ordered by ascending session number.
    """
    # BUG FIX: the original ended with "f3.close" (missing parentheses), so
    # the file was never explicitly closed; "with" guarantees closing.
    with open(fileOutputDirectory+"Training_Statistics_"+infilename,'w') as f3:
        f3.write("Session"+SEPARATOR_SIGN+"P"+SEPARATOR_SIGN+"z"+SEPARATOR_SIGN+"Rho"+SEPARATOR_SIGN+"U\n")

        currentSession = 1
        x = []
        y = []
        for i, item in enumerate(normalized_ImageItems):
            if item.session > currentSession:
                # a new session begins: flush the statistics collected so far
                _writeSessionStatistics(f3, currentSession, x, y)
                x = []
                y = []
                # BUG FIX: the original did "currentSession += 1", which
                # mislabels every following session (and mixes sessions'
                # data) whenever session numbers are not contiguous; track
                # the item's actual session number instead.
                currentSession = item.session

            if item.itemClass==2:
                y.append(item.numPecks)
            if item.itemClass==1:
                x.append(item.numPecks)

            # last item reached: flush the statistics of the final session
            if i == len(normalized_ImageItems)-1:
                _writeSessionStatistics(f3, currentSession, x, y)
    
def createMSRRFile(fileOutputDirectory, infilename, theDict):
    """
    Write the mean standardized response rate (MSRR) and its standard
    deviation for every image-name prefix in theDict to
    "MSRRs_<infilename>" in fileOutputDirectory.
    """
    # BUG FIX: the original ended with "f4.close" (missing parentheses), so
    # the file was never explicitly closed; "with" guarantees closing.
    with open(fileOutputDirectory+"MSRRs_"+infilename,'w') as f4:
        f4.write("Prefix"+SEPARATOR_SIGN+"MSRR"+SEPARATOR_SIGN+"StdDev\n")
        for key in theDict:
            # currentArray[0] is the MSRR, currentArray[1] the std deviation
            currentArray = computeMeanStandardizedResponseRate(theDict[key])
            f4.write(str(key)+ SEPARATOR_SIGN + str(currentArray[0])+ SEPARATOR_SIGN + str(currentArray[1])+"\n")
    
def createTestStatisticsFile(fileOutputDirectory, infilename, theDict):
    """
    Write pairwise Mann-Whitney statistics (P, z, Rho, U) for every
    unordered pair of test-image prefixes in theDict to
    "Test_Statistics_<infilename>" in fileOutputDirectory.
    """
    # local import so the module-level import block stays untouched
    from itertools import combinations

    # BUG FIX: the original ended with "f5.close" (missing parentheses), so
    # the file was never explicitly closed; "with" guarantees closing.
    with open(fileOutputDirectory+"Test_Statistics_"+infilename,'w') as f5:
        # BUG FIX: the original header hard-coded a space between "Rho" and
        # "U"; use SEPARATOR_SIGN like every other column so the header stays
        # consistent if the separator is changed.
        f5.write("Prefix1"+SEPARATOR_SIGN+"Prefix2"+SEPARATOR_SIGN+"P"+SEPARATOR_SIGN+"z"+SEPARATOR_SIGN+"Rho"+SEPARATOR_SIGN+"U\n")

        # iterate over all unordered pairs of test prefixes — the same pairs
        # the original's nested index-skipping loops produced
        for key, key2 in combinations(theDict.keys(), 2):
            # collect the peck counts of both prefix groups
            x = [item.numPecks for item in theDict[key]]
            y = [item.numPecks for item in theDict[key2]]

            # get the statistics for the 2 selected prefix keys, or -1
            # placeholders when either group is empty
            currentArray = [-1, -1, -1, -1]
            if len(x) > 0 and len(y) > 0:
                currentArray = MannWhitney(x, y)

            f5.write(key + SEPARATOR_SIGN + key2 + SEPARATOR_SIGN
                     + SEPARATOR_SIGN.join(str(v) for v in currentArray) + "\n")



# this could be done recursively, but I do not yet know how
# Python handles recursive directory traversal (e.g. os.walk)



top = Tkinter.Tk()
indirectory = askdirectory()

# prevent crashes when no directory is selected
if len(indirectory)==0:
    print 'No directory selected. Exiting.'
    exit(0)


outPath = "./../../output/"
if not os.path.isdir(outPath):
        os.mkdir (outPath)
    
outPath+= "GnG_Analysis/"
if not os.path.isdir(outPath):
        os.mkdir (outPath)
    
    
# batch process all log files    
for infilename in os.listdir(indirectory):
    
    #print infilename
    
    if ".DS" in infilename:
        continue
    
    if os.path.isdir(indirectory+"/"+infilename):
        #skip directories
        continue
    
    
        #create a new directory named after the input file without dots
    #example: this.is.a.log.file.log -> /path-to-output/thisisalogfilelog/
    fileOutputDirectory = outPath + infilename.replace(".", "")+ "/"
    if not os.path.isdir(fileOutputDirectory):
        os.mkdir (fileOutputDirectory)
    
    
    
    #based on the loaded logFileRedear, extract items returns a set of imageItems
    extracted_ImageItems = extractItems(indirectory+"/"+infilename, SESSION_SIZE, SELECTED_SECONDS)
    
    
    
    createProcessedFile(fileOutputDirectory, infilename, extracted_ImageItems)
    
    
    #normalize them from Pystats.MeanStandardizedResponseRate    
    normalized_ImageItems = NormalizeResponseRate(extracted_ImageItems)

    #initialize a dictionary for prefix bound imageItem lists    
    theDict = dict()


    # process normalized items
    for item in normalized_ImageItems:
        
        #only select test images
        if item.itemClass==0:
            
            #read the prefix from the name of the image
            prefix =extractNonDigitPrefix(item.image_name)
            
            # if the prefix does not exist yet in the dictionary create a new list and add it to the prefix key
            if not theDict.__contains__(prefix):
                newDictList = []
                theDict[prefix] = newDictList
                
            # append the normalized item to its prefix
            theDict[prefix].append(item)



    print 'altogether ',len(extracted_ImageItems),' items.'
    

    
    #create the files and write them
    createNormalizedFile(fileOutputDirectory, infilename, normalized_ImageItems)
    createTrainingStatisticsFile(fileOutputDirectory, infilename, normalized_ImageItems)
    createTestStatisticsFile(fileOutputDirectory, infilename, theDict)
    createMSRRFile(fileOutputDirectory, infilename, theDict)
    
top.mainloop()