# -*- coding: utf-8 -*-
"""
PBAR_CreateStandardSizeImagesVer2.py

Make standard cargo and zspec size images, modified markers that work with these new images.

5/14/2014, Creates both numpy and pickle versions now in different directories
5/21/2014, Now writes cargomarkerSW to both npy and pickle directories


Based on PBAR_CreateStandardSizeImages.py

Assumes all files in one directory as opposed to the old multiple layer directory structure.
Set to work on the 2nd set of cargo data.
Needs CargoSet2.txt spreadsheet to run
No figures generated/saved.

 
Created on Thu May 01 07:57:00 2014

@author: jkwong
"""

# Stdlib + project imports (Python 2 era: cPickle, reload).
# NOTE(review): shutil appears twice on the line below; harmless but redundant.
# glob, plt, interpolate, cm, and ndimage are imported but not used in this file
# -- possibly left over from PBAR_CreateStandardSizeImages.py; kept as-is.
import copy, glob, shutil, codecs, os, cPickle, shutil
import PBAR_Zspec, PBAR_Cargo
# reload() picks up edits to the project modules during interactive sessions.
reload(PBAR_Zspec)
reload(PBAR_Cargo)
import numpy as np
import matplotlib.pyplot as plt
from scipy import interpolate
from matplotlib import cm
from scipy import ndimage

# data input and output locations
dataPath = r'E:\PBAR\data4\BasicScanCargo'                                # raw scan inputs
dataOutputBaseDir = r'E:\PBAR\data4\BasicScansStandardWidth'              # .npy outputs
dataPickleOutputBaseDir = r'E:\PBAR\data4\BasicScansStandardWidthPickle'  # pickle (.dat) outputs

# define parameters for standard width images
numberTimeSlices = 2000    # fixed time-axis length of every output image
preCargoTimeSlices = 70    # lead-in slices kept before the detected cargo start

#################################
## DEFINE FILE NAMES

# Read the CargoSet2.txt spreadsheet describing the 2nd cargo data set.
# The loop below reads its 'scanID' and 'dataFile' entries; presumably a
# dict of lists -- confirm against PBAR_Zspec.ReadCargoDataDescriptionFile.
datasetDescription = PBAR_Zspec.ReadCargoDataDescriptionFile(r'E:\PBAR\data4\CargoSet2.txt')

# For every scan in the spreadsheet: load the zspec and cargo images, crop/pad
# them to a standard numberTimeSlices-wide window starting preCargoTimeSlices
# before the detected cargo start, save both .npy and pickle versions, and
# rewrite the marker file coordinates to match the new image geometry.
for datasetIndex, datasetDescript in enumerate(datasetDescription['scanID']):
    print(datasetIndex)
#    if (datasetIndex < 46):
#        continue
    # Hard-coded resume point: skips entries already processed by an earlier run.
    if datasetIndex < 71:
        continue
#

    # ---- Zspec: load, crop/pad to the standard time window, save ----
    filenameZspec = '%s-FDFC-All.npy' %datasetDescription['scanID'][datasetIndex]
    fullFilenameZspec = os.path.join(dataPath, filenameZspec)
    print('Loading %s' %fullFilenameZspec)
#    datZspec = np.load(fullFilenameZspec)
    (energyZspec, datZspec) = PBAR_Zspec.ReadZspecBasicScanNumpy(fullFilenameZspec)

    # Time slice (along axis 0) where the cargo starts within this scan.
    startBinZspec = PBAR_Zspec.FindBasicZspecStartVer2(datZspec)

    # Keep preCargoTimeSlices slices of lead-in before the detected start,
    # then take at most numberTimeSlices slices total.
    temp = datZspec[(startBinZspec-preCargoTimeSlices):,:][0:numberTimeSlices,:,:] # this could have length less than numberTimeSlices    
        
    # This step ensures that we have standard size images:
    # create a container of zeros that is numberTimeSlices long and zero-pad
    # at the end when the scan is shorter than the window.
    datZspecSW = np.zeros((numberTimeSlices, datZspec.shape[1], datZspec.shape[2]))
    if temp.shape[0] < numberTimeSlices:  # if less than number of time slices
        datZspecSW[0:temp.shape[0],:,:] = temp
    else:
        datZspecSW = temp  # already has numberTimeSlices number of slices so good to go
    # cast before writing the standard width file
    datZspecSW = datZspecSW.astype(np.uint16)

    # save the standard width image to file (numpy format)
    filenameZspecSW = filenameZspec.replace('All', 'All_SW')
    fullFilenameZspecSW = os.path.join(dataOutputBaseDir, filenameZspecSW)
    np.save(fullFilenameZspecSW, datZspecSW)
    print('Wrote: %s' %fullFilenameZspecSW)

    # write pickle version (protocol 2) of the same array to the pickle directory
    fullFilenameZspecSWPickle = os.path.join(dataPickleOutputBaseDir, filenameZspecSW).replace('All_SW.npy', 'All_SW.dat')
    with open(fullFilenameZspecSWPickle, 'wb') as fid:
        print('Write %s' %fullFilenameZspecSWPickle)
        cPickle.dump(datZspecSW, fid, 2)

    # NOTE(review): this float copy is never used again below -- looks like
    # dead code left from an earlier version; confirm before removing.
    datZspecSW = datZspecSW.astype(np.float)
    numberZspecDetectors = datZspec.shape[1]

    # ---- Cargo: read the matching cargo/radiography image ----
    filenameCargo = 'PBAR-%s.cargoimage' %datasetDescription['dataFile'][datasetIndex]
    fullFilenameCargo = os.path.join(dataPath, filenameCargo)
    print('Loading %s' %fullFilenameCargo)
    (datCargo,bpp,formatt,flag,low1,high1,low2,high2) = PBAR_Cargo.ReadCargoImage(fullFilenameCargo)

    
    # Create standard width image for cargo/rad.
    # Average column pairs (1+4k, 2+4k) for k in 0..135 -- presumably collapsing
    # the 4x finer cargo column pitch down to the 136 zspec detector rows;
    # TODO confirm the pitch ratio against the detector geometry.
    indices1 = 1 + 4 * np.arange(136) 
    indices2 = 2 + 4* np.arange(136)
    temp = (datCargo[:,indices1] + datCargo[:,indices2])/2
    
    # get the start bin
    startBinCargo = PBAR_Cargo.FindCargoStart(datCargo)
    
    # cut section in front and limit to numberTimeSlices number of time slices
    temp = temp[(startBinCargo-preCargoTimeSlices):,:][0:numberTimeSlices,:]
    
    # This ensures that we have standard size images;
    # excess is filled with zeros
    datCargoSW = np.zeros((numberTimeSlices, numberZspecDetectors))
    if temp.shape[0] < numberTimeSlices:
        datCargoSW[0:temp.shape[0],:] = temp
    else:
        datCargoSW = temp[0:numberTimeSlices,:]
    
    # write cargo standard width file (numpy format)
    filenameCargoSW = filenameCargo.replace('cargoimage', 'cargoimageSW.npy')
    fullFilenameCargoSW = os.path.join(dataOutputBaseDir, filenameCargoSW)
    np.save(fullFilenameCargoSW, datCargoSW)
    print('Wrote %s' %fullFilenameCargoSW)
    # write pickle version (protocol 2)
    fullFilenameCargoSWPickle = os.path.join(dataPickleOutputBaseDir, filenameCargoSW).replace('cargoimageSW.npy', 'cargoimageSW.dat')
    with open(fullFilenameCargoSWPickle, 'wb') as fid:
        print('Write %s' %fullFilenameCargoSWPickle)
        cPickle.dump(datCargoSW, fid, 2)
    
    # clean up unused variables
    del bpp,formatt,flag,low1,high1,low2,high2
    del indices1, indices2, temp

    # ---- Markers: shift/rescale to match the standard-width images ----
    filenameMarker = filenameCargo.replace('cargoimage', 'cargomarker')
    fullFilenameMarker = fullFilenameCargo.replace('cargoimage', 'cargomarker')
    # some scans don't have marker files
    if os.path.exists(fullFilenameMarker):
        markers = PBAR_Cargo.ReadCargoMarker(fullFilenameMarker)
    else:
        print('Does not exist, skipping: %s' %fullFilenameMarker)
        continue
    # Modify the marker data based on the found start times
    
    # deep-copy first so the original marker dicts stay untouched
    temp = copy.deepcopy(markers)
    
    # adjust the x (time-slice) values: subtract the number of slices cropped
    # from the front of the image
    offset = (startBinCargo - preCargoTimeSlices)
    for i in np.arange(len(temp)):
        temp[i]['rec_left'] = temp[i]['rec_left'] - offset
        temp[i]['rec_right'] = temp[i]['rec_right'] - offset
        temp[i]['x'] = temp[i]['x'] - offset
        # 'left'/'right' sub-markers are optional
        if 'left' in temp[i]:
            temp[i]['left']['x'] = temp[i]['left']['x'] - offset
        if 'right' in temp[i]:
            temp[i]['right']['x'] = temp[i]['right']['x'] - offset
    # adjust the y values with the same (y - 1.5)/4 mapping implied by the
    # 4-column averaging done on the cargo image above
    for i in np.arange(len(temp)):
        temp[i]['rec_top'] = np.round((temp[i]['rec_top'] - 1.5) / 4.0)
        temp[i]['rec_bottom'] = np.round((temp[i]['rec_bottom'] - 1.5) / 4.0)
        temp[i]['y'] = np.round((temp[i]['y'] - 1.5) / 4.0)
        # 'left'/'right' sub-markers are optional
        if 'left' in temp[i]:
            temp[i]['left']['y'] = np.round((temp[i]['left']['y'] - 1.5) / 4.0)
        if 'right' in temp[i]:
            temp[i]['right']['y'] = np.round((temp[i]['right']['y'] - 1.5) / 4.0)
    # NOTE(review): markerStandardWidth is never used after this line -- the
    # file rewrite below re-derives everything from the text; confirm whether
    # this in-memory copy is still needed.
    markerStandardWidth = copy.copy(temp)
    

    # save modified marker file:
    # read the cargomarker text (utf-16), adjust the numeric attribute values
    # in-place, and save to a new cargomarkerSW file

    offset = (startBinCargo-preCargoTimeSlices)
    # input marker file name
    filenameMarkerSW = filenameMarker.replace('cargomarker', 'cargomarkerSW')
    # output marker file path
    fullFilenameMarkerSW = os.path.join(dataOutputBaseDir, filenameMarkerSW)
    
    # open old file for reading
    with codecs.open(fullFilenameMarker, 'rb', encoding = 'utf-16') as fidOld:
        # open new file for writing
        with codecs.open(fullFilenameMarkerSW, 'wb', encoding = 'utf-16') as fid:
            for lineIn in fidOld:
                if len(lineIn) < 3:
                    fid.write(lineIn) # the first line seems to contain some whitespace
                else:
                    lineIn2 = copy.copy(lineIn)
                    # x-direction attributes: subtract the crop offset.
                    # Values appear as e.g. left="123"; the +1 skips the opening quote.
                    preStringList = ['left=', 'right=', 'center x=', 'left x=', 'right x=']                    
                    # cycle through the list of x direction values.
                    for (index, preString) in enumerate(preStringList):
                        # check if the attribute actually exists on this line
                        if lineIn2.find(preString) > -1:
                            index1 = lineIn2.find(preString) + len(preString) + 1 # index of the first digit
                            index2 = lineIn2[index1:].find('"')+index1 # index of the closing double quote
                            theNumber = int(lineIn2[index1:index2]) - offset
                            lineIn2 = u'%s%d%s' %(lineIn2[:index1], theNumber, lineIn2[index2:])
                            
                    # y-direction attributes: apply the (y - 1.5)/4 rescale
                    preStringList = ['top=', 'bottom=', ' y=']                    
                    # cycle through the list of y direction values; the nested
                    # blocks below are a manual unroll handling up to three
                    # occurrences of the same key on one line
                    for (index, preString) in enumerate(preStringList):
                        # check if the attribute actually exists on this line
                        if lineIn2.find(preString) > -1:
                            index1 = lineIn2.find(preString) + len(preString) + 1 # index of the first digit
                            index2 = lineIn2[index1:].find('"')+index1 # index of the closing double quote
                            theNumber = round((int(lineIn2[index1:index2]) - 1.5 ) /4.0 )
                            lineIn2 = u'%s%d%s' %(lineIn2[:index1], theNumber, lineIn2[index2:])
                            # second occurrence of the same key, if any
                            if lineIn2.find(preString, index1+1) > -1:
                                index1 = lineIn2.find(preString, index1+1) + len(preString) + 1 # index of the first digit
                                index2 = lineIn2[index1:].find('"')+index1 # index of the closing double quote
                                theNumber = round((int(lineIn2[index1:index2]) - 1.5 ) /4.0 )
                                lineIn2 = u'%s%d%s' %(lineIn2[:index1], theNumber, lineIn2[index2:])
                                # third occurrence of the same key, if any
                                if lineIn2.find(preString, index1+1) > -1:
                                    index1 = lineIn2.find(preString, index1+1) + len(preString) + 1 # index of the first digit
                                    index2 = lineIn2[index1:].find('"')+index1 # index of the closing double quote
                                    theNumber = round((int(lineIn2[index1:index2]) - 1.5 ) /4.0 )
                                    lineIn2 = u'%s%d%s' %(lineIn2[:index1], theNumber, lineIn2[index2:])
                            
                    #write the modified line to the new file
                    fid.write(lineIn2)
    # NOTE(review): redundant -- both with-blocks above already closed the files.
    fid.close()
    fidOld.close()
    print('Wrote: %s' %fullFilenameMarkerSW)
    # copy the converted marker file to the pickle output location too
    fullFilenameMarkerSW_source = os.path.join(dataOutputBaseDir, filenameMarkerSW)
    fullFilenameMarkerSW_destination = os.path.join(dataPickleOutputBaseDir, filenameMarkerSW)
    shutil.copy(fullFilenameMarkerSW_source, fullFilenameMarkerSW_destination)