# -*- coding: utf-8 -*-
"""

    PBAR_SavePerformanceMergedMarkers.py
    - loads all the performance values and saves to a single file

Created on Fri May 16 10:35:02 2014

@author: jkwong
"""


import glob, os, gc, objgraph, cPickle, time, copy
import numpy as np
import PBAR_Zspec, PBAR_Cargo
import matplotlib.pyplot as plt
from matplotlib import mpl
from scipy import ndimage
reload(PBAR_Zspec)

# Input/output locations on the analysis drive.
dataPath = r'E:\PBAR\data4\BasicScansStandardWidthPickle'
data0Path = r'E:\PBAR\data4\HighZFinder'
plotSaveDir = r'E:\PBAR\data4\HighZFinder'

num = 11

figureSize = (16, 10)

# Global parameters
MINBIN = 1        # minimum energy bin used by the feature calculation
newWidth = 600    # reduced image width (pixels)
newHeight = 136   # reduced image height (pixels)
xCargoInternalBounds = np.array((38, 550))
yCargoInternalBounds = np.array((13, 106))
# Only parts of the zspec image with radiation counts in this range are
# considered (an earlier run used an upper limit of 4e7).
cargoCountRange = np.array([0, 3e7])

ZspecCountRange = np.array([0, 275])

# Candidate detection windows to sweep, one (x, y) size per entry.
windowList = [np.array(shape) for shape in ((3, 1), (5, 1), (7, 1), (4, 2), (7, 2))]

# Discriminant thresholds to sweep.
discrimThresholdList = np.array((0, 2))

# Fractions of window pixels that must pass the threshold.
fractionPixelsList = np.array((0.8, 1.0))

tolerance = [3, 1]

# Dataset catalogue: one entry per scan with scanID / dataFile / config columns.
datasetDescription = PBAR_Zspec.ReadCargoDataDescriptionFile(r'E:\PBAR\data4\CargoSet2.txt')

# One summary row per (window, threshold, fraction, scan) combination.
numberCombinations = (len(windowList) * len(discrimThresholdList)
                      * len(fractionPixelsList) * len(datasetDescription['scanID']))

# Flat result table: one preallocated slot per parameter/scan combination for
# every metric; 'window' is the lone 2-column entry (x and y window size).
_scalarKeys = ('datasetIndex', 'discrimThreshold', 'fractionPixels', 'f1',
               'success', 'numMM', 'numTruthMarkers', 'numTrueHighZ', 'TP',
               'numMMCorrect', 'recall', 'precision', 'scanID', 'config')
performanceSummary = {key: np.zeros(numberCombinations) for key in _scalarKeys}
performanceSummary['window'] = np.zeros((numberCombinations, 2))

# Sweep every (scan, window, threshold, fraction) combination, load the
# previously-computed marker/performance pickles for each one, and fill in
# the corresponding row of performanceSummary.
combinationIndex = -1
for datasetIndex, datasetDescript in enumerate(datasetDescription['scanID']):
    print('%d, %s' %(datasetIndex, datasetDescript))
    zspecScanNumber = np.int(datasetDescript)

    # Zspec file path for this scan (only the name is used in this stage).
    filenameZspec = '%s-FDFC-All_SW.dat' %datasetDescription['scanID'][datasetIndex]
    fullFilenameZspec = os.path.join(dataPath, filenameZspec)

    # Cargo image file for this scan.
    filenameCargo = 'PBAR-%s.cargoimageSW.dat' %datasetDescription['dataFile'][datasetIndex]
    fullFilenameCargo = os.path.join(dataPath, filenameCargo)

    # Truth-marker filename derived from the cargo filename.
    filenameMarker = filenameCargo.replace('cargoimageSW.dat', 'cargomarkerSW')
    fullFilenameMarker = fullFilenameCargo.replace('cargoimageSW.dat', 'cargomarkerSW')

    # Some scans have no marker file; treat those as "no truth markers".
    if os.path.exists(fullFilenameMarker):
        markerStandardWidth0 = PBAR_Cargo.ReadCargoMarker(fullFilenameMarker)
    else:
        markerStandardWidth0 = []

    # Rescale marker x positions for the reduced-width image.  The multiplier
    # is currently the identity (newWidth / newWidth == 1.0); an earlier
    # revision scaled by newWidth / datZspecStandardWidth.shape[0].
    multiplier = newWidth/float(newWidth)
    offset = 0.0
    markerStandardWidth = PBAR_Zspec.ModifyMarkersXPosition(markerStandardWidth0, multiplier, offset)

    numberMarkers = len(markerStandardWidth)

    # Flag which truth markers are high-Z material:
    # s = special, w = tungsten, d = depleted uranium; Fe/Pb stay False.
    highZMaterial = np.zeros(numberMarkers) == 1 # boolean array, all False
    for markerIndex, marker in enumerate(markerStandardWidth):
        if marker['target'][0].lower() in ('s', 'w', 'd'):
            highZMaterial[markerIndex] = True

    for windowIndex, window in enumerate(windowList):
        for discrimThresholdIndex, discrimThreshold in enumerate(discrimThresholdList):
            for fractionPixelsIndex, fractionPixels in enumerate(fractionPixelsList):

                combinationIndex += 1

                tStart = time.time()

                # Build one cross-reference dict per marker.  (Writing
                # [{...}] * numberMarkers would alias a single shared dict.)
                highZMarkersCrossRef = []
                for i in xrange(numberMarkers):
                    highZMarkersCrossRef.append({'mergedMergedIndex': []})

                fullFilenamePotential = \
                    fullFilenameMarker.replace('cargomarkerSW', \
                        '%d_%d_%d_%3.2f.potential' %(window[0], window[1], discrimThreshold, fractionPixels) )
                print('Working on %s' %fullFilenamePotential)

                # load original high-Z candidate markers
                with open(fullFilenamePotential, 'rb') as fid:
                    print('Load Original Markers: %s' %fullFilenamePotential)
                    highZMarkersList = cPickle.load(fid)
                # load the merged markers
                fullFilenameMarkerMerged = fullFilenamePotential.replace('potential', 'highZMerged')
                with open(fullFilenameMarkerMerged, 'rb') as fid:
                    print('Load Merged Markers: %s' %fullFilenameMarkerMerged)
                    highZMergedMarkersList = cPickle.load(fid)
                # load the merged-merged markers
                fullFilenameMarkerMerged = fullFilenamePotential.replace('potential', 'highZMergedMerged')
                with open(fullFilenameMarkerMerged, 'rb') as fid:
                    print('Load Merged Merged Markers: %s' %fullFilenameMarkerMerged)
                    highZMergedMergedMarkersList = cPickle.load(fid)
                # load the merged-merged marker performance record
                fullFilenamePerformance = fullFilenamePotential.replace('potential', 'performance')
                with open(fullFilenamePerformance, 'rb') as fid:
                    # BUG FIX: the original message said 'Write:' for a load.
                    print('Load Performance: %s' %fullFilenamePerformance)
                    dat = cPickle.load(fid)
                print('time elapsed3: %3.3f' %(time.time() - tStart))

                # Success = fraction of truth markers whose overlap-based
                # prediction agrees with the truth label; NaN when the
                # record lacks the fields or there are no truth markers.
                try:
                    success = sum( (dat['markerMergedMergedOverlapCount'] > 0) == dat['highZMaterial']) / np.float(len(dat['highZMaterial']))
                except:
                    success = np.nan

                performanceSummary['datasetIndex'][combinationIndex] = datasetIndex
                performanceSummary['config'][combinationIndex] = datasetDescription['config'][datasetIndex]
                performanceSummary['scanID'][combinationIndex] = datasetDescription['scanID'][datasetIndex]

                performanceSummary['discrimThreshold'][combinationIndex] = discrimThreshold
                performanceSummary['window'][combinationIndex,:] = window
                performanceSummary['fractionPixels'][combinationIndex] = fractionPixels

                performanceSummary['success'][combinationIndex] = success
                performanceSummary['numMM'][combinationIndex] = len(dat['mergedMergedFalsePositives'])
                performanceSummary['numTruthMarkers'][combinationIndex] = len(dat['highZMaterial'])
                performanceSummary['numTrueHighZ'][combinationIndex] = sum(dat['highZMaterial'])
                performanceSummary['TP'][combinationIndex] = sum(   (dat['markerPrediction'] == dat['highZMaterial']) & (dat['highZMaterial'] == 1)   )
                performanceSummary['numMMCorrect'][combinationIndex] = sum(dat['mergedMergedTruePositives'])

                # Recall = TP / (TP + FN) = # of high-z marked by MM / # of true high-z
                # (the original comment mis-stated this as TP / (TP + FP))
                try:
                    temp = performanceSummary['TP'][combinationIndex] / performanceSummary['numTrueHighZ'][combinationIndex]
                except:
                    temp = np.nan
                performanceSummary['recall'][combinationIndex] = temp
                # Precision = TP / (TP + FP)
                # NOTE(review): the denominator below is numTrueHighZ +
                # numMMCorrect, which does not literally match TP + FP --
                # confirm the intended definition before relying on this column.
                try:
                    temp = performanceSummary['TP'][combinationIndex] / (performanceSummary['numTrueHighZ'][combinationIndex] + sum(dat['mergedMergedTruePositives']))
                except:
                    temp = np.nan
                performanceSummary['precision'][combinationIndex] = temp

                # F1 = harmonic mean of recall and precision.
                try:
                    f1 = 2 * performanceSummary['recall'][combinationIndex] * performanceSummary['precision'][combinationIndex] / (performanceSummary['recall'][combinationIndex] + performanceSummary['precision'][combinationIndex])
                except:
                    # BUG FIX: the original assigned "success = np.nan" here,
                    # leaving f1 undefined (NameError on the first iteration)
                    # or stale from the previous combination.
                    f1 = np.nan

                performanceSummary['f1'][combinationIndex] = f1

                print('time elapsed3: %3.3f' %(time.time() - tStart))

# Persist the merged summary table to disk as a protocol-2 binary pickle.
fullFilenamePerformance = os.path.join(data0Path, 'performanceSummary.dat')
print('Write: %s' %fullFilenamePerformance)
with open(fullFilenamePerformance, 'wb') as fid:
    cPickle.dump(performanceSummary, fid, 2)


