# PBAR_Zspec_ProcessData.py
#
#
#   3/27/2013, John Kwong

# calculate distribution median, skew for each dataset

import numpy as np
import matplotlib.pyplot as plt
import numpy.matlib
import datetime
import time
from sklearn.lda import LDA


# One (n_datasets x n_detectors) result array per statistic name.
stats = dict()

# Names of the per-spectrum statistics computed further below.
statsList = ('binSum', 'binMean', 'binSTD',
             'quantileBins_10', 'quantileBins_25', 'quantileBins_50', 'quantileBins_75', 'quantileBins_90',
             'multibin_0_10', 'multibin_10_256', 'multibin_0_20', 'multibin_20_256',
             'transmission')

# Iterate the tuple directly instead of indexing via range(len(...)).
for statName in statsList:
    stats[statName] = np.zeros((len(dat), dat[0].shape[1]))

# BUG FIX: an incomplete statement `stats['binSum'] = ` stood here; it was a
# SyntaxError that prevented the whole file from parsing.  It has been removed
# — the per-name module-level arrays allocated below are what the rest of the
# script actually uses.

# Common shape for every accumulator: one row per dataset, one column per
# detector.
_statShape = (len(dat), dat[0].shape[1])

binSum = np.zeros(_statShape)
binMean = np.zeros(_statShape)
binSTD = np.zeros(_statShape)

# NOTE(review): this array is never read below, and it lists four levels while
# five quantiles (10/25/50/75/90%) are actually computed — confirm intent.
quantilePercentages =  np.array([0.10, 0.25, 0.5, 0.90])

quantileBins_10 = np.zeros(_statShape)
quantileBins_25 = np.zeros(_statShape)
quantileBins_50 = np.zeros(_statShape)
quantileBins_75 = np.zeros(_statShape)
quantileBins_90 = np.zeros(_statShape)

# Coarse low/high histogram splits at bin 10 and bin 20.
multibin_0_10 = np.zeros(_statShape)
multibin_10_256 = np.zeros(_statShape)

multibin_0_20 = np.zeros(_statShape)
multibin_20_256 = np.zeros(_statShape)

transmission = np.zeros(_statShape)

## make some nice arrays for doing matrix operations
# binArray: 256 x n_detectors, each column is the bin indices 0..255.
x = np.arange(0, 256)
binArray = np.matlib.repmat(x, dat[0].shape[1], 1).T

for ii in range(len(dat)):
    # Work in float so the moment sums and divisions are not integer math.
    counts = dat[ii].astype(float)

    # Normalized cumulative distribution along the 256 energy bins.
    cdf = counts.cumsum(axis = 0)
    cdf = cdf / np.matlib.repmat(cdf[-1,:], 256, 1)

    # Quantiles: for each detector column, the bin index where the CDF is
    # closest to the target level.
    for quantileArray, level in ((quantileBins_10, 0.10),
                                 (quantileBins_25, 0.25),
                                 (quantileBins_50, 0.50),
                                 (quantileBins_75, 0.75),
                                 (quantileBins_90, 0.90)):
        quantileArray[ii,:] = np.argmin(abs(cdf - level), axis = 0)

    # First and second moments of the count distribution, per detector.
    totals = counts.sum(axis = 0).astype(float)
    binSum[ii,:] = totals
    binMean[ii,:] = (binArray * counts).sum(axis = 0).astype(float) / totals

    # Spread about the mean; the repmat makes a 256 x n_detectors array of
    # per-column means so it lines up with binArray element-wise.
    meanArray = np.matlib.repmat(binMean[ii,:], counts.shape[0], 1)
    binSTD[ii,:] = np.sqrt((((binArray - meanArray)**2) * counts).sum(axis = 0).astype(float) / totals)

    # Histogram splits on the raw spectra — no rebinning so no correction.
    for multibinArray, lo, hi in ((multibin_0_10, 0, 10),
                                  (multibin_10_256, 10, 256),
                                  (multibin_0_20, 0, 20),
                                  (multibin_20_256, 20, 256)):
        multibinArray[ii,:] = dat[ii][lo:hi,:].sum(axis=0)

    # Transmission value: summed spectrum / acquisition time.
    transmission[ii,:] = totals / datasetAcquisitionTime[ii]

# make an array containing all the discriminates for a subset of the detectors

# Restrict to the "good" detectors with indices strictly between 50 and 80.
detectorList = goodDetectorsList.copy()[0]
detectorList = detectorList[(detectorList>50) & (detectorList < 80)]

##datasetGroupsIndices['CC']
##groupNames   # (was a bare no-op expression statement; kept as a comment)

for ii in range(0, len(groupNames)):
    for jj in range(0, len(datasetGroups[groupNames[ii]])):
        index = datasetGroupsIndices[groupNames[ii]][jj]
        for kk in range(0,len(arrayNameList)):
            arrayName = arrayNameList[kk]
            # NOTE(review): the row is indexed by the group number `ii`, not
            # the per-dataset `index` computed just above (which is otherwise
            # unused) — confirm which is intended before relying on this.
            temp = arrays[arrayName][ii,detectorList]
            # BUG FIX: `size(temp)` used an undefined name (NameError at
            # runtime); flatten via the array's own element count instead.
            temp = temp.reshape(np.size(temp))
            # NOTE(review): `temp` is discarded each iteration — this loop
            # appears unfinished (nothing accumulates the flattened rows).



# calculate gain shift variable
countRange = np.array([0.7e3, 1e3])
binMeanCollimatorClosed = np.zeros((len(dat), dat[0].shape[1]))
gainShift = np.zeros((len(dat), dat[0].shape[1]))

for ii in range(len(dat)):
    # Mask: bins 21..149 whose raw counts fall inside countRange.
    mask = (binArray > 20) & (binArray < 150)
    mask = mask & (dat[ii] > countRange[0]) & (dat[ii] < countRange[1])

    # Count-weighted mean bin over the masked region, per detector column.
    weightedSum = (binArray * dat[ii] * mask).sum(axis = 0)
    binMeanCollimatorClosed[ii,:] = weightedSum / (dat[ii] * mask).sum(axis = 0)

    # Gain shift relative to the first collimator-closed ('CC') dataset.
    reference = binMeanCollimatorClosed[datasetGroupsIndices['CC'][0],:]
    gainShift[ii,:] = binMeanCollimatorClosed[ii,:] / reference

# calculate gain correction vs time
#
# Interpolate the collimator-closed ('CC') bin means onto every dataset's
# timestamp, one detector column at a time, so each dataset gets a reference.
binMeanCollimatorClosedExtrapolated = binMeanCollimatorClosed.copy()

# GENERALIZATION: the detector count was hard-coded as 137; derive it from the
# array itself so the loop always covers every column.
n_detectors = binMeanCollimatorClosed.shape[1]

# The CC timestamps are loop-invariant — hoist them out of the column loop.
xx = datasetTimeNum[datasetGroupsIndices['CC']]
for ii in range(n_detectors):
    yy = binMeanCollimatorClosed[datasetGroupsIndices['CC'], ii]

    # NOTE(review): np.interp clamps to the end values outside [xx[0], xx[-1]]
    # — it does not truly extrapolate; confirm that is acceptable here.
    binMeanCollimatorClosedExtrapolated[:,ii] = np.interp(datasetTimeNum, xx, yy)
    
# extrapolate the collimator closed values for other datasets

# calculate the correction value

# save spectra quantities to file


