# PBAR_Zspec_ProcessData_Ver2.py
#
#
#   3/27/2013, John Kwong
#   4/23/2013, corrected some minor bugs, added correction across detectors, move LDA stuff to another script

import csv
import datetime
import os
import time

import matplotlib.pyplot as plt
import numpy as np
import numpy.matlib
from mpl_toolkits.mplot3d import Axes3D

##
# CALCULATE GAIN SHIFT BIN
##

# Count window (counts per bin) used to select bins for the gain-shift estimate
countRange = np.array([0.7e3, 1e3])

nDataset = len(dat)
nDetector = dat[0].shape[1]
binMeanCC = np.zeros((nDataset, nDetector))
gainShift = np.zeros((nDataset, nDetector))

# Bin-index matrix: each column of shape (nBins, nDetector) runs 0..nBins-1,
# replicated once per detector so it can multiply the spectra element-wise.
binArray = np.tile(np.arange(dat[0].shape[0])[:, np.newaxis], (1, nDetector))

for ii in range(nDataset):
    spectrum = dat[ii]
    # keep only bins 21..149 whose counts fall inside the count window
    selected = (binArray > 20) & (binArray < 150) & (spectrum > countRange[0]) & (spectrum < countRange[1])
    weighted = spectrum * selected
    # count-weighted mean bin per detector, and its drift relative to dataset 0
    binMeanCC[ii, :] = (binArray * weighted).sum(axis=0) / weighted.sum(axis=0)
    gainShift[ii, :] = binMeanCC[ii, :] / binMeanCC[0, :]

# EXTRAPOLATE GAIN SHIFT BIN FOR OTHER DATASETS BASED ON TIME STAMP
# For datasets without a collimator-closed (CC) measurement, linearly
# interpolate each detector's CC bin mean against the dataset time stamps.
binMeanCCExtrapolated = binMeanCC.copy()
xx = datasetTimeNum[datasetGroupsIndices['CC']]  # invariant across detectors; hoisted out of the loop
# NOTE(review): was a hard-coded range(0,136); use the actual detector count so
# the last column(s) are not silently skipped (later code suggests 137 detectors).
for ii in range(binMeanCC.shape[1]):
    yy = binMeanCC[datasetGroupsIndices['CC'], ii]
    binMeanCCExtrapolated[:, ii] = np.interp(datasetTimeNum, xx, yy)

##
## CALCULATE SPECTRA STATS
##

# Spectrum bins below this index are zeroed before any statistic is computed
lowerBinThreshold = 7

# Every per-dataset, per-detector statistic computed by the main loop below.
# Suffix '_g'  = rescaled for gain drift over time;
# suffix '_g1' = additionally lined up across detectors.
statsList = (
    'binSum',
    'binMean', 'binSTD', 'binSTD_binMean',
    'q_10', 'q_25', 'q_50', 'q_75', 'q_90',
    'q_range', 'q_range_ratio', 'binMeanSq', 'binMean_q50',

    'binMean_g', 'binSTD_g',
    'q_10_g', 'q_25_g', 'q_50_g', 'q_75_g', 'q_90_g',
    'q_range_g', 'binMeanSq_g', 'binMean_q50_g',

    'binMean_g1', 'binSTD_g1',
    'q_10_g1', 'q_25_g1', 'q_50_g1', 'q_75_g1', 'q_90_g1',
    'q_range_g1', 'binMeanSq_g1', 'binMean_q50_g1',

    'multibin_0_10', 'multibin_10_256', 'multibin_0_20', 'multibin_20_256',
    'multibin_10_ratio', 'multibin_20_ratio',

    'multibin_0_10_g', 'multibin_10_256_g', 'multibin_0_20_g', 'multibin_20_256_g',
    'multibin_10_ratio_g', 'multibin_20_ratio_g',

    'multibin_0_10_g1', 'multibin_10_256_g1', 'multibin_0_20_g1', 'multibin_20_256_g1',
    'multibin_10_ratio_g1', 'multibin_20_ratio_g1',
    'transmission',
)

# One (nDataset x nDetector) zero array per statistic
stats = dict()
for statName in statsList:
    stats[statName] = np.zeros((len(dat), dat[0].shape[1]))

## Main per-dataset statistics pass over every spectrum.
## Depends on state built earlier in the pipeline:
##   dat                    - list of (256 x nDetector) count histograms
##   binArray               - (256 x nDetector) matrix of bin indices
##   binMeanCC, binMeanCCExtrapolated - collimator-closed (CC) gain references
##   datasetGroupsIndices   - dict of dataset index arrays ('CC' = collimator closed)
##   datasetAcquisitionTime - per-dataset acquisition time (units not shown here)

for ii in range(len(dat)):  # cycle through the dataset
    datTemp = dat[ii].astype(float)

    # remove the lower bins by setting it to zero (suppress the low-energy end
    # below lowerBinThreshold before computing any statistic)
    datTemp[0:lowerBinThreshold,:] = 0

    datTempCummulativeSum = datTemp.cumsum(axis = 0) # cumulative sum
    datTempCummulativeSum = datTempCummulativeSum / np.matlib.repmat(datTempCummulativeSum[-1,:], 256, 1) # normalize the cumulative sum

    # quantiles per detector: bin index where the normalized CDF is closest
    # to each target fraction (10%, 25%, median, 75%, 90%)
    stats['q_10'][ii,:] = np.argmin(abs(datTempCummulativeSum - 0.10), axis = 0)
    stats['q_25'][ii,:] = np.argmin(abs(datTempCummulativeSum - 0.25), axis = 0)
    stats['q_50'][ii,:] = np.argmin(abs(datTempCummulativeSum - 0.50), axis = 0)
    stats['q_75'][ii,:] = np.argmin(abs(datTempCummulativeSum - 0.75), axis = 0)
    stats['q_90'][ii,:] = np.argmin(abs(datTempCummulativeSum - 0.90), axis = 0)
    # quantile range (interquartile spread) and its ratio to the median
    stats['q_range'][ii,:] = stats['q_75'][ii,:] - stats['q_25'][ii,:]
    stats['q_range_ratio'][ii,:] = stats['q_range'][ii,:] / stats['q_50'][ii,:]

    # count-weighted mean bin, mean squared bin, and total counts per detector
    stats['binMean'][ii,:] = (binArray * datTemp).sum(axis = 0).astype(float) / datTemp.sum(axis = 0).astype(float)
    stats['binMeanSq'][ii,:] = ((binArray**2) * datTemp).sum(axis = 0).astype(float) / datTemp.sum(axis = 0).astype(float)
    stats['binSum'][ii,:] = datTemp.sum(axis = 0).astype(float)

    # separation between mean and median (skewness-like indicator)
    stats['binMean_q50'][ii,:] = stats['binMean'][ii,:] - stats['q_50'][ii,:]

    temp = np.matlib.repmat(stats['binMean'][ii,:], datTemp.shape[0],1)  # array of means, 256 x number detectors

    # count-weighted standard deviation about the mean bin
    stats['binSTD'][ii,:] = np.sqrt(  ( ((binArray - temp)**2) * datTemp).sum(axis = 0).astype(float) / datTemp.sum(axis = 0).astype(float)  )

    stats['binSTD_binMean'][ii,:] = stats['binSTD'][ii,:] / stats['binMean'][ii,:]

    # CALCULATE THE GAIN CORRECTED parameters
    baselineIndex = datasetGroupsIndices['CC'][0]
    baselineDetector = 68 # detector 69 (zero-based index)

    # correct across time: gain factor of this dataset relative to the first CC dataset
    g = binMeanCCExtrapolated[baselineIndex,:] / binMeanCCExtrapolated[ii,:]
    # correct across detectors; line up gains to detector 69, first CC datasets
    g1 = binMeanCCExtrapolated[baselineIndex,baselineDetector] / binMeanCCExtrapolated[baselineIndex,:]

    # '_g' statistics: the same quantities rescaled by the temporal gain factor
    stats['q_10_g'][ii,:] = stats['q_10'][ii,:] * g
    stats['q_25_g'][ii,:] = stats['q_25'][ii,:] * g
    stats['q_50_g'][ii,:] = stats['q_50'][ii,:] * g
    stats['q_75_g'][ii,:] = stats['q_75'][ii,:] * g
    stats['q_90_g'][ii,:] = stats['q_90'][ii,:] * g
    stats['q_range_g'][ii,:] = stats['q_range'][ii,:] * g
    stats['binMean_g'][ii,:] = stats['binMean'][ii,:] * g
    stats['binMeanSq_g'][ii,:] = stats['binMeanSq'][ii,:] * g
    stats['binMean_q50_g'][ii,:] = stats['binMean_q50'][ii,:] * g
    stats['binSTD_g'][ii,:] = stats['binSTD'][ii,:] * g

    # '_g1' statistics: additionally rescaled by the cross-detector factor
    stats['q_10_g1'][ii,:] = stats['q_10'][ii,:] * g * g1
    stats['q_25_g1'][ii,:] = stats['q_25'][ii,:] * g * g1
    stats['q_50_g1'][ii,:] = stats['q_50'][ii,:] * g * g1
    stats['q_75_g1'][ii,:] = stats['q_75'][ii,:] * g * g1
    stats['q_90_g1'][ii,:] = stats['q_90'][ii,:] * g * g1
    stats['q_range_g1'][ii,:] = stats['q_range'][ii,:] * g * g1
    stats['binMean_g1'][ii,:] = stats['binMean'][ii,:] * g * g1
    stats['binMeanSq_g1'][ii,:] = stats['binMeanSq'][ii,:] * g * g1
    stats['binMean_q50_g1'][ii,:] = stats['binMean_q50'][ii,:] * g * g1
    stats['binSTD_g1'][ii,:] = stats['binSTD'][ii,:] * g * g1

    # MULTIBIN PARAMETERS
    # calculate histogram splits - no rebinning so no correction
    stats['multibin_0_10'][ii,:] = datTemp[0:10,:].sum(axis=0)  # bins 0 to 9, so first ten bins
    stats['multibin_10_256'][ii,:] = datTemp[10:,:].sum(axis=0)
    stats['multibin_0_20'][ii,:] = datTemp[0:20,:].sum(axis=0)
    stats['multibin_20_256'][ii,:] = datTemp[20:,:].sum(axis=0)

    stats['multibin_10_ratio'][ii,:]  = stats['multibin_0_10'][ii,:]  /stats['multibin_10_256'][ii,:]
    stats['multibin_20_ratio'][ii,:]  = stats['multibin_0_20'][ii,:]  /stats['multibin_20_256'][ii,:]

    # calculate the histogram splits with correction for temporal gain shift:
    # instead of rescaling the sums, the split threshold itself is shifted by
    # the per-detector gain ratio (current/baseline) and rounded to a bin index
    baselineIndex = datasetGroupsIndices['CC'][0]
    binCorrection = binMeanCCExtrapolated[ii,:]/binMeanCC[baselineIndex,:] # current/baseline

    # bin 10 split: gain-shifted threshold around nominal bin 9
    thresholdMat = np.round(np.matlib.repmat(binCorrection, 256,1)*9.0)  # threshold
    cutt1 = binArray <= thresholdMat
    cutt1 = cutt1.astype(float)
    cutt2 = binArray > thresholdMat
    cutt2 = cutt2.astype(float)

    stats['multibin_0_10_g'][ii,:] = (datTemp * cutt1).sum(axis=0)
    stats['multibin_10_256_g'][ii,:] = (datTemp * cutt2).sum(axis=0)

    # bin 20 split: gain-shifted threshold around nominal bin 19
    thresholdMat = np.round(np.matlib.repmat(binCorrection, 256,1)*19.0)  # threshold
    cutt1 = binArray <= thresholdMat
    cutt1 = cutt1.astype(float)
    cutt2 = binArray > thresholdMat
    cutt2 = cutt2.astype(float)

    stats['multibin_0_20_g'][ii,:] = (datTemp * cutt1).sum(axis=0)
    stats['multibin_20_256_g'][ii,:] = (datTemp * cutt2).sum(axis=0)

    stats['multibin_10_ratio_g'][ii,:]  = stats['multibin_0_10_g'][ii,:]  /stats['multibin_10_256_g'][ii,:]
    stats['multibin_20_ratio_g'][ii,:]  = stats['multibin_0_20_g'][ii,:]  /stats['multibin_20_256_g'][ii,:]

    # CORRECT ACROSS DETECTORS using the first CC dataset to correct
    # NOTE(review): baselineIndex and the first factor of binCorrection are
    # recomputed identically to the temporal-correction section above
    baselineIndex = datasetGroupsIndices['CC'][0]
    binCorrection = binMeanCCExtrapolated[ii,:]/binMeanCC[baselineIndex,:] # current/baseline
    temp = binMeanCCExtrapolated[baselineIndex,:] / binMeanCCExtrapolated[baselineIndex,baselineDetector]
    binCorrection = binCorrection * temp

    # bin 10 split with combined temporal + cross-detector threshold shift
    thresholdMat = np.round(np.matlib.repmat(binCorrection, 256,1)*9.0)  # threshold
    cutt1 = binArray <= thresholdMat
    cutt1 = cutt1.astype(float)
    cutt2 = binArray > thresholdMat
    cutt2 = cutt2.astype(float)

    stats['multibin_0_10_g1'][ii,:] = (datTemp * cutt1).sum(axis=0)
    stats['multibin_10_256_g1'][ii,:] = (datTemp * cutt2).sum(axis=0)

    # bin 20 split with combined temporal + cross-detector threshold shift
    thresholdMat = np.round(np.matlib.repmat(binCorrection, 256,1)*19.0)  # threshold
    cutt1 = binArray <= thresholdMat
    cutt1 = cutt1.astype(float)
    cutt2 = binArray > thresholdMat
    cutt2 = cutt2.astype(float)

    stats['multibin_0_20_g1'][ii,:] = (datTemp * cutt1).sum(axis=0)
    stats['multibin_20_256_g1'][ii,:] = (datTemp * cutt2).sum(axis=0)

    stats['multibin_10_ratio_g1'][ii,:] = stats['multibin_0_10_g1'][ii,:] / stats['multibin_10_256_g1'][ii,:]
    stats['multibin_20_ratio_g1'][ii,:] = stats['multibin_0_20_g1'][ii,:] / stats['multibin_20_256_g1'][ii,:]

    # transmission value - sum spectra / acquisition time
    stats['transmission'][ii,:] = datTemp.sum(axis = 0) / datasetAcquisitionTime[ii]

# SAVE VALUES TO FILE
# One CSV per statistic; each row holds the dataset filename followed by the
# per-detector values (truncated to 6-character strings via '|S6').

basepath = r'C:\Users\jkwong\Documents\Work\PBAR\data'

for ii in range(len(statsList)):
    outPath = os.path.join(basepath, 'zspecNtuples', statsList[ii] + '.csv')
    # 'with' guarantees the handle is closed even if a write fails.
    # 'wb' is the correct csv mode on Python 2 (this script's vintage);
    # on Python 3 use open(outPath, 'w', newline='') instead.
    with open(outPath, 'wb') as fid:
        csvWriterObj = csv.writer(fid)
        for jj in range(stats[statsList[ii]].shape[0]):
            csvWriterObj.writerow(np.hstack((filenameList[jj], stats[statsList[ii]][jj, :].astype('|S6'))))

# CALCULATE MEANS AND SIGMAS OF THE SINGLE-MATERIAL CLUSTERS FOR EACH REGION

# Transmission regions (lower, upper) used to split datasets into clusters
_boundPairs = ((2200, 5240), (5240, 12700), (12700, 20000))
transmissionBoundList = tuple(np.array(pair) for pair in _boundPairs)

## TODO (unfinished): fit polynomial to the per-region group statistics sketched below

##groupMean = dict()
##groupSigma = dict()
####groupMean['multibin_10_ratio_g']['Fe'][0]
##materialsList = ('Pb', 'Fe', 'Al')
##
####cycle through statsList
####    cycle through materials
####        cycle through boundsList
####            calculate the mean, sigma
##
##for ii in range(len(statList)):
##    for jj in range(len(materialsList)):
##    # build large array of number
##        
##        for kk in range(len(transmissionBoundList)):
##            groupMean = 
##            


##
# CONVERT RAD TO ZSPEC SIZE ARRAY
##
# Collapse each RAD dataset onto the zspec detector layout: for zspec
# detector j, average the RAD channels selected by zspecToRad[j].
# list(...) is needed so dict.keys() also works on Python 3, where a bare
# dict view passed to np.array yields a useless 0-d object array.
radDatasetNames = np.array(list(datRad.keys()))

datRadZspec = dict()
for name in radDatasetNames:
    radData = datRad[name]
    datRadZspec[name] = np.array([radData[zspecToRad[j]].mean()
                                  for j in range(len(zspecToRad))])

# extrapolate the the collimator closed values for other datasets

# calculate the correction value

# save spectra quantities to file


