#!/usr/bin/python

import os,sys,argparse,time,subprocess,json,re,pprint
import re
from datetime import *
import datetime
import time

import os
import fnmatch
import csv
import commands
import sys

# Module metadata.
__author__ = "Dazhi Dong <dazhi.dong@emc.com>"
# NOTE(review): under Python 2, filter() on a str returns a str ("11" here);
# under Python 3 this would be a lazy filter object instead.
__version__ = filter(str.isdigit, "$Revision: 11 $")
__date__ = "4/13/2016"

# Name and absolute path of this script (used for argparse prog= below).
__filename__ = os.path.basename(__file__)
__abspath__ = os.path.abspath(__file__)
              
################################
# start - common methods
################################

# validate input parameters
# validate input parameters
def argument_parser():
    """Build and return the command-line parser for this tool."""
    arg_parser = argparse.ArgumentParser(prog=__filename__, usage='%(prog)s [options]')
    # --dc is the only option actually validated; --all is still a TODO.
    arg_parser.add_argument('--dc', help="Input a dc file")
    arg_parser.add_argument('--all', help="TODO: Show analysis result for all uem jobs")
    arg_parser.add_argument('--id', help="Show analysis result for a uem job")
    return arg_parser
    
def _validateInputParameters(): 
    """
    Get names from arguments
    """
    res = list()
    if args.dc:
        if (os.path.isfile(args.dc) == True): 
            return True
        else: 
            print 'Error! Cannot access the dc file. '
            return False
    else: 
        print 'Error! No dc file.'
        return False
        
def execute_cmd(_cmd):     
    """Run *_cmd* through a shell and return (status, output).

    Thin wrapper over the Python 2-only ``commands.getstatusoutput``:
    status is the raw os.wait()-style value and output combines
    stdout+stderr with the trailing newline stripped.
    """
    return commands.getstatusoutput(_cmd)

def create_folder(folder):
    """Create *folder* with mode 766 if it does not already exist.

    No-op when the folder is already there; does not create parents.
    """
    # 0o766 is the modern spelling of the old 0766 octal literal (valid
    # on Python 2.6+ and required on Python 3).
    if not os.path.isdir(folder):
        os.mkdir(folder, 0o766)

# Keys of the dict returned by cem_log_parse(); each one matches a named
# group in the cemtracer log-line regex.
CEMTRACER_DATE          = "date"
CEMTRACER_MS            = "ms"
CEMTRACER_US            = "us"
CEMTRACER_PROVIDERNAME  = "ProviderName"
CEMTRACER_LEVEL         = "Level"
CEMTRACER_PROCESSID     = "processid"
CEMTRACER_THREADID      = "threadid"
CEMTRACER_METHOD        = "method"
CEMTRACER_FILE          = "file"
CEMTRACER_LINE          = "line"
CEMTRACER_LOG           = "log"

# Compiled once at import time: cem_log_parse() runs on every log line,
# so recompiling the pattern per call (as the original did) is wasted work.
_CEM_LOG_RE = re.compile(r"^(?P<date>\d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2})  - \[(?P<ProviderName>.*)\] (?P<Level>\w+) - \{(\d+):(?P<ms>\d+):(?P<us>\d+)\}\[(?P<processid>\d+)\|(?P<threadid>\d+)\|(?P<threadid2>\w+)\]\[(?P<method>\w+) @ (?P<file>.*):(?P<line>\d+)\] (?P<log>.*)")

# Named groups copied into the result dict (the values equal the
# CEMTRACER_* constants above; 'threadid2' and the unnamed group are
# intentionally dropped, exactly as before).
_CEM_LOG_FIELDS = ("date", "ms", "us", "ProviderName", "Level",
                   "processid", "threadid", "method", "file", "line", "log")

def cem_log_parse(line): 
    """Parse one cemtracer log line.

    Returns [True, dict] with the fields named in _CEM_LOG_FIELDS when
    *line* matches the cemtracer format, otherwise [False, None].
    """
    s = _CEM_LOG_RE.match(line)
    if not s: 
        return [False, None]
    return [True, {name: s.group(name) for name in _CEM_LOG_FIELDS}]
    
def uem_job_call_graph_log_list_sort(a, b): 
    """cmp-style comparator for [jobLogDateTime, jobLogUsLong, line]
    entries: order by timestamp first, then by the microsecond counter."""
    left = (a[0], a[1])
    right = (b[0], b[1])
    # Lexicographic tuple comparison is equivalent to the field-by-field
    # compare; (x > y) - (x < y) yields the classic 1 / -1 / 0.
    return (left > right) - (left < right)
        
def uem_job_state_transaction_list_sort(a, b): 
    """cmp-style comparator for job state transition entries
    [spIdStr, jobLogDateTime, jobLogUsLong, jobIdStr, jobStateStr]:
    order by timestamp (index 1), then microsecond counter (index 2)."""
    left = (a[1], a[2])
    right = (b[1], b[2])
    return (left > right) - (left < right)

def uemjob_execution_error_list_sort(a, b): 
    """cmp-style comparator for execution error entries
    [jobThreadId, jobLogDateTime, jobLogUsLong, taskIndexStr, taskNameStr,
    executionErrorStr, jobLogStr]: order by timestamp (index 1), then
    microsecond counter (index 2)."""
    left = (a[1], a[2])
    right = (b[1], b[2])
    return (left > right) - (left < right)

def uemjob_callback_list_sort(a, b): 
    """cmp-style comparator for callback entries
    [jobThreadId, jobLogDateTime, jobLogUsLong, callbackName, jobLogStr]:
    order by timestamp (index 1), then microsecond counter (index 2)."""
    left = (a[1], a[2])
    right = (b[1], b[2])
    return (left > right) - (left < right)

def uemjob_callGraphSegment_list_sort(a, b): 
    """cmp-style comparator for UemJobCallGraphSegment objects: orders
    by _OnThreadStartTime, then _OnThreadStartTimeUs.

    NOTE(review): the guard is `or`, so the comparison still runs when
    only one side has a start time; under Python 2, None compares as
    smaller than other objects, which puts unset segments first —
    confirm that ordering is intended.
    """
    if a._OnThreadStartTime != None or b._OnThreadStartTime != None: 
        if a._OnThreadStartTime > b._OnThreadStartTime: 
            return 1
        if a._OnThreadStartTime < b._OnThreadStartTime: 
            return -1
        if a._OnThreadStartTimeUs > b._OnThreadStartTimeUs: 
            return 1
        if a._OnThreadStartTimeUs < b._OnThreadStartTimeUs: 
            return -1
        else: 
            return 0
    else: 
        # Neither segment has a start time: cannot order them.
        print 'Warning, not start time, cannot sort from uemjob_callGraphSegment_list_sort method, return 0...'
        return 0
        
def uemjob_task_status_transaction_list_sort(a, b): 
    """cmp-style comparator for task status transition entries
    [taskNo, oldStatus, newStatus, jobLogDateTime, jobLogUsLong]:
    order by task number (index 0), then timestamp (index 3), then
    microsecond counter (index 4)."""
    left = (a[0], a[3], a[4])
    right = (b[0], b[3], b[4])
    return (left > right) - (left < right)
################################
# end   - common methods
################################

# Parse command-line arguments at import time; the module-level ``args``
# namespace is read directly by _validateInputParameters() and callers.
parser = argument_parser()
args = parser.parse_args()

class Logger:
    """Minimal line-oriented log writer over a single output file."""

    def __init__(self, name):
        """Open *name* for writing (truncates any existing file)."""
        # Set the attribute before open() so __del__ is safe even if
        # open() raises (the original crashed with AttributeError then).
        self._handler = None
        self._handler = open(name, 'w')

    def close(self):
        """Close the underlying file; safe to call more than once."""
        if self._handler is not None:
            self._handler.close()
            self._handler = None

    def __del__(self):
        # Best-effort cleanup; _handler may be missing or already closed.
        handler = getattr(self, '_handler', None)
        if handler is not None:
            handler.close()

    def write(self, *lines):
        """Write each string in *lines* followed by a newline."""
        for line in lines:
            self._handler.write(line + '\n')
        
################################
# start - class DcExtractor
################################
class DcExtractor: 
    """Unpacks a data-collection (dc) tar file and prepares merged
    cemtracer log files under <dc folder>/job/{tmp,out} for both SPs.

    NOTE(review): shell commands (tar/cp/gunzip/rm/touch/cat) are built
    by plain string concatenation, so paths containing spaces or shell
    metacharacters would break them.
    """
    def __init__(self, dcFile):
        """
        Initialize a dc extractor.        
        """
        self._dcFile = dcFile
        
        # Folder layout; all of these are populated by run().
        self.spaLogFileRoot         = None
        self.spbLogFileRoot         = None
        self.jobTmpSpa              = None
        self.jobTmpSpb              = None
        self.jobOutFolder           = None
        self._jobFolder             = None
        self.jobOutSpa              = None
        self.jobOutSpb              = None
        self._db_reports_folder     = None
        
    def get_log_root_folder(self, dcFileFolder, spId): 
        """Return the log root folder for one SP ('spa' or 'spb'), or
        None when that SP's folder does not exist.  Prefers the
        mnt/ssdroot subfolder when present."""
        spLogFileRoot = os.path.join(dcFileFolder, spId)
        if (os.path.isdir(spLogFileRoot) == False): 
            return None; 
        if (os.path.isdir(os.path.join(spLogFileRoot, 'mnt/ssdroot')) == True): 
            spLogFileRoot = os.path.join(spLogFileRoot, 'mnt/ssdroot')
                
        return spLogFileRoot
            
    def create_job_tmp_out_folders(self, dcFileFolder): 
        """Create job/, job/tmp, job/out and per-SP subfolders (only for
        SPs whose log root was found)."""
        # create job folder
        self._jobFolder = os.path.join(dcFileFolder, 'job')
        create_folder(self._jobFolder)

        # create job tmp folder
        self.jobTmpFolder = os.path.join(self._jobFolder, 'tmp')
        create_folder(self.jobTmpFolder)
        # create job output folder
        self.jobOutFolder = os.path.join(self._jobFolder, 'out')
        create_folder(self.jobOutFolder)
        
        # create job tmp and out folders for spa
        if (self.spaLogFileRoot != None): 
            self.jobTmpSpa = os.path.join(self.jobTmpFolder, 'spa')
            create_folder(self.jobTmpSpa)
            self.jobOutSpa = os.path.join(self.jobOutFolder, 'spa')
            create_folder(self.jobOutSpa)
            
        # create job tmp and out folders for spb
        if (self.spbLogFileRoot != None): 
            self.jobTmpSpb = os.path.join(self.jobTmpFolder, 'spb')
            create_folder(self.jobTmpSpb)
            self.jobOutSpb = os.path.join(self.jobOutFolder, 'spb')
            create_folder(self.jobOutSpb)

    def copy_original_log_to_tmp(self): 
        """Copy each SP's EMC/CEM/log contents into its job/tmp/<sp>
        folder via `cp -rf`."""
        if (self.spaLogFileRoot != None): 
            originalLogPath = os.path.join(self.spaLogFileRoot, 'EMC/CEM/log')
            if (os.path.isdir(originalLogPath) == True): 
                execute_cmd("cp -rf " + originalLogPath + '/* ' + self.jobTmpSpa)
                
        if (self.spbLogFileRoot != None): 
            originalLogPath = os.path.join(self.spbLogFileRoot, 'EMC/CEM/log')
            if (os.path.isdir(originalLogPath) == True): 
                execute_cmd("cp -rf " + originalLogPath + '/* ' + self.jobTmpSpb)

    def unzip_tmp_log_file(self, tmpFolder): 
        """gunzip every .gz file directly inside *tmpFolder*; failures
        are reported but do not stop the loop."""
        fileList = os.listdir(tmpFolder)
        for f in fileList: 
            fileName, extName = os.path.splitext(f)
            if extName == '.gz': 
                (status, output) = execute_cmd('gunzip -f ' + os.path.join(tmpFolder, f))
                if status != 0: 
                    print 'unzip file ' + os.path.join(tmpFolder, f) + ' error, but will continue with next file!'
                    # print output
                
    def unzip_tmp_log_files(self): 
        """Unzip the tmp log folders of whichever SPs were found."""
        if (self.spaLogFileRoot != None): 
            self.unzip_tmp_log_file(self.jobTmpSpa)
        if (self.spbLogFileRoot != None): 
            self.unzip_tmp_log_file(self.jobTmpSpb)
            
    def get_cemtracer_log_file_start_time(self, fileName): 
        """Scan *fileName* for the first 'DD Mon YYYY HH:MM:SS' stamp and
        return [True, datetime] or [False, None].

        NOTE(review): on the early-return path the file descriptor is
        never closed (fd.close() only runs after the loop breaks) —
        consider a try/finally.
        """
        if os.path.isfile(fileName): 
            fd = open(fileName)
            while True: 
                line = fd.readline()
                if len(line) == 0 : 
                    break;
                line = re.sub(r'\n', "", line)
                ret = re.search(r'\d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2}', line)
                if ret:
                    startTime = datetime.datetime.strptime(ret.group(0), "%d %b %Y %H:%M:%S")
                    return [True, startTime]
                else: 
                    continue
            fd.close()
        return [False, None]
    
    def merge_tmp_log_files_into_out_folder_common(self, tmpFolder, outFolder): 
        """For each rotated cemtracer_*.log family in *tmpFolder*,
        concatenate its files in start-time order into one file of the
        same name in *outFolder*."""
        fileList = os.listdir(tmpFolder)
        
        # get "cemtracer_xxx.log" files from tmp folder. 
        cemtracer_log_list = []            
        for f in fileList: 
            fName, extName = os.path.splitext(f)                
            if fName.startswith('cemtracer') and extName == '.log': 
                cemtracer_log_list.append(f)
        
        # merge and generate out files: 
        for cemtracerX in cemtracer_log_list: 
            log_list = []
            for f in fileList: 
                if f.startswith(cemtracerX): 
                    fileName = os.path.join(tmpFolder, f)
                    [Found, logFileStarttime] = self.get_cemtracer_log_file_start_time(fileName)
                    if Found == True: 
                        log_list.append([f, logFileStarttime])
            
            # sort the log list by time (Python 2-only cmp-style sort):
            log_list.sort(lambda x,y:cmp(x[1],y[1]))
            
            # remove output file
            outputFile = os.path.join(outFolder, cemtracerX)
            execute_cmd("rm -rf " + outputFile)
            
            # touch output file
            execute_cmd("touch " + outputFile)
            
            # generate output file
            for l in log_list: 
                logName = os.path.join(tmpFolder, l[0])
                execute_cmd("cat " + logName + " >> " + outputFile)
    
    def merge_tmp_log_files_into_out_folder(self): 
        """Merge tmp logs into the out folders for whichever SPs exist."""
        if (self.spaLogFileRoot != None): 
            self.merge_tmp_log_files_into_out_folder_common(self.jobTmpSpa, self.jobOutSpa)
                
        if (self.spbLogFileRoot != None): 
            self.merge_tmp_log_files_into_out_folder_common(self.jobTmpSpb, self.jobOutSpb)
            
        
    def run(self): 
        """Full extraction pipeline; returns True on success, False when
        the dc file cannot be extracted (exits if neither SP has logs)."""
        # (1) check the folder, if not exist, untar the dc file. 
        dcFileDirName = os.path.dirname(self._dcFile)
        dcFileBaseName = os.path.basename(self._dcFile)
        dcFileName, dcFileExt = os.path.splitext(dcFileBaseName)
        dcFileFolder = os.path.join(dcFileDirName, dcFileName)
        
        # Step [1]: if the dc file folder doesn't exist, create it.
        if (os.path.isdir(dcFileFolder) == False): 
            # untar the dc file:
            execute_cmd("tar xvf " + self._dcFile + " -C " + dcFileDirName) 
    
        # after extract the tar file, check whether the folder exist, if not, return False, cann't continue...
        if (os.path.isdir(dcFileFolder) == False):
            print 'dc file extract error.'
            return False
            
        # Step [2] check the .tgz files for both spa and spb: "spa.service_dc.tgz" and "spb.service_dc.tgz"
        spaTgzFile = os.path.join(dcFileFolder, "spa.service_dc.tgz")
        spbTgzFile = os.path.join(dcFileFolder, "spb.service_dc.tgz")
        if (os.path.isfile(spaTgzFile) == True): 
            self.spaLogFileRoot = self.get_log_root_folder(dcFileFolder, 'spa'); 
            if (self.spaLogFileRoot == None): 
                execute_cmd("tar zxvf " + spaTgzFile + " -C " + dcFileFolder)
                
        if (os.path.isfile(spbTgzFile) == True): 
            self.spbLogFileRoot = self.get_log_root_folder(dcFileFolder, 'spb'); 
            if (self.spbLogFileRoot == None): 
                execute_cmd("tar zxvf " + spbTgzFile + " -C " + dcFileFolder)

        # Validate... after extract the tgz files, check whether the folder exist, if not, return False, and cann't continue...            
        self.spaLogFileRoot = self.get_log_root_folder(dcFileFolder, 'spa'); 
        self.spbLogFileRoot = self.get_log_root_folder(dcFileFolder, 'spb'); 
        if (self.spaLogFileRoot == None and self.spbLogFileRoot == None): 
            print 'Didn\'t find log root folders from either SP, exit.'
            sys.exit()

        # check job output folder, if exists, don't do it again
        self._jobFolder = os.path.join(dcFileFolder, 'job')
        self.jobOutFolder = os.path.join(self._jobFolder, 'out')
        if os.path.isdir(self.jobOutFolder) == False: 
            # Step [3]: create job tmp and out folders
            self.create_job_tmp_out_folders(dcFileFolder)
            
            # Step [4]: copy original logs to tmp folders
            self.copy_original_log_to_tmp()
            
            # Step [5]: unzip gz files from tmp folders
            self.unzip_tmp_log_files()
            
            # Step [6]: merge tmp log files into out folders. 
            self.merge_tmp_log_files_into_out_folder()
        else: 
            # set job tmp folder
            self.jobTmpFolder = os.path.join(self._jobFolder, 'tmp')
            # set job output folder
            self.jobOutFolder = os.path.join(self._jobFolder, 'out')
            
            # set job tmp and out folders for spa
            if (self.spaLogFileRoot != None): 
                self.jobTmpSpa = os.path.join(self.jobTmpFolder, 'spa')
                self.jobOutSpa = os.path.join(self.jobOutFolder, 'spa')
                
            # set job tmp and out folders for spb
            if (self.spbLogFileRoot != None): 
                self.jobTmpSpb = os.path.join(self.jobTmpFolder, 'spb')
                self.jobOutSpb = os.path.join(self.jobOutFolder, 'spb')

        # check job db_reports folder, if exists, don't do it again
        self._db_reports_folder = os.path.join(self.jobOutFolder, 'db_reports'); 
        if os.path.isdir(self._db_reports_folder) == False: 
            # NOTE(review): if only SPB logs were found, self.spaLogFileRoot
            # is None and this os.path.join() raises — confirm whether
            # single-SP dc files can reach this point.
            jobDbTgzFile = os.path.join(self.spaLogFileRoot, 'cmd_outputs/logDaemon/pgsql/db_reports.tgz')
            if os.path.isfile(jobDbTgzFile) == False: 
                jobDbTgzFile = os.path.join(self.spbLogFileRoot, 'cmd_outputs/logDaemon/pgsql/db_reports.tgz')
            if os.path.isfile(jobDbTgzFile) == False: 
                print 'Warning, didn\'t find job db_reports.tgz file from either SP. '
                self._db_reports_folder = None
            else:                
                execute_cmd("tar zxvf " + jobDbTgzFile + " -C " + self.jobOutFolder)
        
        return True
################################
# end - class DcExtractor
################################

################################
# start - class UemJobProcessingSegment
################################
class UemJobProcessingSegment: 
    """One start/stop slice of job processing on a single thread."""

    def __init__(self, startTime, startTimeUs, startThreadId, startLog): 
        # Start-of-segment markers, captured at construction.
        self._startTime = startTime
        self._startTimeUs = startTimeUs
        self._startThreadId = startThreadId
        self._startLog = startLog

        # Stop-of-segment markers; unknown until observed later.
        self._stopTime = None
        self._stopTimeUs = None
        self._stopThreadId = None
        self._stopLog = None
        
################################
# end - class UemJobProcessingSegment
################################

################################
# start - class UemJobCallGraphSegment
################################
class UemJobCallGraphSegment: 
    """Span of a job's execution on one SP/ECOM process, bounded by
    OnThreadStart/OnThreadExit markers."""

    def __init__(self, jobIdStr, spIdStr, ecomProcessId, onThreadStartTime, onThreadStartTimeUs, onThreadStartThreadId): 
        # Identity of the segment.
        self._jobIdStr = jobIdStr
        self._spIdStr = spIdStr
        self._ecomProcessId = ecomProcessId

        # OnThreadStart marker.
        self._OnThreadStartTime = onThreadStartTime
        self._OnThreadStartTimeUs = onThreadStartTimeUs
        self._OnThreadStartThreadId = onThreadStartThreadId

        # OnThreadExit marker; unknown until observed.
        self._OnThreadExitTime = None
        self._OnThreadExitTimeUs = None
        self._OnThreadExitThreadId = None

        # Nested UemJobProcessingSegment entries inside this span.
        self._processingSegList = []
        
################################
# end - class UemJobCallGraphSegment
################################

################################
# start - class UemJob
################################
class UemJob: 
    """In-memory record of one UEM job reconstructed from the logs."""

    def __init__(self, jobIdStr): 
        # Identity and static description.
        self._jobIdStr = jobIdStr
        self._jobBatchJobIdStr = ""
        self._jobNumOfTasks = 0
        self._jobNameStr = ""

        # Task table.
        self._jobTasks = {}

        # Submission details (unknown until parsed).
        self._jobSubmitSpIdStr = None
        self._jobSubmitTime = None
        self._jobSubmitTimeUs = None
        self._jobSubmitThreadId = None

        # Transition histories and flags.
        self._jobStateTransitionList = []
        self._jobTaskStatusTransitionList = []
        self._jobScalableFlag = False
        # execution error list:
        self._executionErrorList = []
        # callback list:
        self._callbackList = []

        # flag of running with ECOM restarts
        self._runningWithECOMRestarts = False

        # job processing call graph, key is 'spId_ECOMprocessID'
        self._uemJobCallGraphSegmentList = []

        # job final state
        self._jobFinalState = ''

        # Raw OnThreadStart / OnThreadExit markers collected per job.
        self._jobOnThreadStartTime = []
        self._jobOnThreadStartTimeUs = []
        self._jobOnThreadStartThreadId = []
        self._jobOnThreadExitTime = []
        self._jobOnThreadExitTimeUs = []
        self._jobOnThreadExitThreadId = []

        self._jobExecutionList = []
        
################################
# end - class UemJob
################################

################################
# start - class UemJobTaskDbInfo
################################
class UemJobTaskDbInfo: 
    def __init__(self, 
                 groupid	    , 
                 jobinstanceid	, 
                 percentcomplete, 
                 recoverypolicy	, 
                 rollbackpolicy	, 
                 taskidentifier	, 
                 taskindex	    , 
                 taskname	    , 
                 taskweight): 
        self._groupid	  = groupid	
        self._jobinstanceid	  = jobinstanceid	
        self._percentcomplete  = percentcomplete
        self._recoverypolicy	  = recoverypolicy	
        self._rollbackpolicy	  = rollbackpolicy	
        self._taskidentifier	  = taskidentifier	
        self._taskindex	  = taskindex	         
        self._taskname	  = taskname	         
        self._taskweight  = taskweight

    def display_uemjobtaskdbinfo(self): 
        print self._jobinstanceid, self._taskindex, self._taskname
################################
# end - class UemJobTaskDbInfo
################################

################################
# start - class UemJobDbInfo
################################
class UemJobDbInfo: 
    def __init__(self,
                 applicationabortflag	     ,
                 childinstanceid	         ,
                 clientdata	                 ,
                 clientprovidersessionname	 ,
                 currenttaskindex	         ,
                 currenttaskname	         ,
                 emcextendederrorcode	     ,
                 emcextendederrordescription,
                 emcextendedrollbackerrorcode,
                 errortaskindex	             ,
                 estimatedweight	         ,
                 hidden	                     ,
                 instanceid	                 ,
                 jobattributes	             ,
                 jobstate	                 ,
                 jobsubstate	             ,
                 name	                     ,
                 numberoftasks	             ,
                 originalpriority	         ,
                 owner	                     ,
                 parentinstanceid	         ,
                 percentcomplete	         ,
                 presuspendrundirection	     ,
                 priority	                 ,
                 recoverymode	             ,
                 recoverypolicy	             ,
                 requestid		             ,
                 starttime	                 ,
                 timeoflaststatechange	     ,
                 timesubmitted	             ,
                 userid): 
        self._applicationabortflag	         = applicationabortflag	     
        self._childinstanceid	             = childinstanceid	             
        self._clientdata	                 = clientdata	                 
        self._clientprovidersessionname	     = clientprovidersessionname	 
        self._currenttaskindex	             = currenttaskindex	         
        self._currenttaskname	             = currenttaskname	             
        self._emcextendederrorcode	         = emcextendederrorcode	     
        self._emcextendederrordescription    = emcextendederrordescription  
        self._emcextendedrollbackerrorcode   = emcextendedrollbackerrorcode 
        self._errortaskindex	             = errortaskindex	             
        self._estimatedweight	             = estimatedweight	             
        self._hidden	                     = hidden	                     
        self._instanceid	                 = instanceid	                 
        self._jobattributes	                 = jobattributes	             
        self._jobstate	                     = jobstate	                 
        self._jobsubstate	                 = jobsubstate	                 
        self._name	                         = name	                     
        self._numberoftasks	                 = numberoftasks	             
        self._originalpriority	             = originalpriority	         
        self._owner	                         = owner	                     
        self._parentinstanceid	             = parentinstanceid	         
        self._percentcomplete	             = percentcomplete	             
        self._presuspendrundirection	     = presuspendrundirection	     
        self._priority	                     = priority	                 
        self._recoverymode	                 = recoverymode	             
        self._recoverypolicy	             = recoverypolicy	             
        self._requestid		                 = requestid		             
        self._starttime	                     = starttime	                 
        self._timeoflaststatechange	         = timeoflaststatechange	     
        self._timesubmitted	                 = timesubmitted	             
        self._userid                         = userid
        self._uemJobTaskDbInfoList           = []

    def display_job_info(self): 
        print self._instanceid, self._jobstate
################################
# end - class UemJobDbInfo
################################

################################
# start - class UemJobParser
################################
class UemJobParser: 
    def __init__(self, jobFolder, jobOutFolder, jobOutSpa, jobOutSpb, db_reports_folder):
        """
        Initialize a uem job parser.

        db_reports_folder may be None (DcExtractor.run() sets it to None
        when no db_reports.tgz is found); in that case the csv paths are
        left as None instead of crashing inside os.path.join().
        """
        self._jobFolder = jobFolder
        self._jobOutFolder = jobOutFolder
        self._jobOutSpa = jobOutSpa
        self._jobOutSpb = jobOutSpb

        # create result folder
        self._resultFolder = os.path.join(self._jobFolder, 'result')
        create_folder(self._resultFolder)

        # Per-SP job dictionaries and ID lists parsed from the logs.
        self._uemJobDictSpa = {}
        self._uemJobIdListSpa = []
        self._uemJobDictSpb = {}
        self._uemJobIdListSpb = []

        # Merged view across both SPs.
        self._uemJobDict = {}
        self._uemJobIdList = []

        self._uemJobAbnormalList = []
        self._uemJobScalableList = []

        self._batchJobDict = {}
        self._batchJobList = []

        # summary file
        self._summaryFile = ''

        # call graph file list:
        self._callGraphFileList = []

        # Uem Job/Task tables info
        self._db_reports_folder = db_reports_folder
        # BUGFIX: the original passed a possibly-None folder straight to
        # os.path.join(), which raised; guard and leave the paths None.
        if self._db_reports_folder is not None:
            self._job_csv_file = os.path.join(self._db_reports_folder, 'emc_uem_transactionjobleaf.csv')
            self._task_csv_file = os.path.join(self._db_reports_folder, 'emc_uem_persistenttaskcontainerleaf.csv')
        else:
            self._job_csv_file = None
            self._task_csv_file = None
        self._uemJobDbInfoDict = {}
        self._uemJobDbIdList = []
        self._numOfJobs = 0
        self._numOfTasks = 0

        # Job buckets; names suggest grouping by the job's DB state.
        self._new_job_list            = []
        self._starting_job_list       = []
        self._running_job_list        = []
        self._suspended_job_list      = []
        self._completed_job_list      = []
        self._terminated_job_list     = []
        self._killed_job_list         = []
        self._exception_job_list      = []
        self._rollingback_job_list    = []
        self._unknown_state_job_list  = []

        self._numOfJobTaskRelatedTables     = 0
        self._jobRelatedTableFiles          = []
        self._jobTaskRelatedTableFiles      = []
        self._jobRelatedArrayTableFiles     = []
        self._jobTaskRelatedArrayTableFiles = []
        self._jobAssocTableFiles            = []
        self._jobTaskAssocTableFiles        = []

    
    def get_job_list_str(self, prefixStr, uemJobDbInfo_list): 
        """Return '   <count> <prefixStr>: ' followed by the job instance
        IDs, printed as list literals in groups of 16 per line."""
        result = '   ' + str(len(uemJobDbInfo_list)) + ' ' + prefixStr + ': \n'
        instance_ids = [info._instanceid for info in uemJobDbInfo_list]
        # Emit the IDs sixteen at a time, one tab-indented list per line.
        for start in range(0, len(instance_ids), 16):
            result += '\t' + str(instance_ids[start:start + 16]) + '\n'
        return result
        
    def display_job(self, uemJob, spIdStr): 
        """Print a human-readable dump of one UemJob to stdout
        (Python 2 print statements; display only, no return value)."""
        print spIdStr + ': Job=[' + uemJob._jobIdStr + ',' + uemJob._jobNameStr + ', ' + str(uemJob._jobNumOfTasks) + ']'
        print 'Submit=[' + uemJob._jobSubmitSpIdStr + ',' + str(uemJob._jobSubmitTime) + ', ' + str(uemJob._jobSubmitTimeUs) + ', ' + str(uemJob._jobSubmitThreadId) + ']'
        if uemJob._jobScalableFlag == True: 
            print 'Is Scalable Job. ' 
        else: 
            print 'Not a Scalable Job. '
        if uemJob._runningWithECOMRestarts == True: 
            print 'Running with ECOM restarts = Yes'
        else: 
            print 'Running with ECOM restarts = No'
        
        print 'Job Final state = ' + uemJob._jobFinalState
        
        # jobState entries are [spIdStr, time, us, jobIdStr, jobStateStr];
        # index 4 is the state string.
        for jobState in uemJob._jobStateTransitionList: 
            print 'Job [' + uemJob._jobIdStr + '], Job State Transaction = [' + jobState[4] + ']'
        print 
        
        print uemJob._executionErrorList
        
        # callback entries are [threadId, time, us, callbackName, logStr];
        # index 4 is the raw log string.
        print 'Callbacks: '
        for callback in uemJob._callbackList: 
            print callback[4]
        
        print 'call graph segment size = ' + str(len(uemJob._uemJobCallGraphSegmentList))
    
    def generate_uemjob_analysis_result_from_timeframe_threadid(self, spIdStr, jobIdStr, ecomProcessId, threadId, startTime, startTimeUs, endTime, endTimeUs): 
        """Collect log lines for one (process, thread) within a time
        window from the merged cemtracer logs of the given SP.

        Returns a list of [jobLogDateTime, jobLogUsLong, rawLine]
        entries; when endTime is None the window is open-ended.
        """
        uemJobCallGraphLogList = []
        # Default to SPB's out folder; switch to SPA when requested.
        jobOutSpx = self._jobOutSpb
        if spIdStr == 'spa': 
            jobOutSpx = self._jobOutSpa

        cemtracerFileList = os.listdir(jobOutSpx)
        
        for cemtracerFile in cemtracerFileList: 
            # filter out files like: ".cemtracer_uemjobsvc.log.swp"
            (fileName, extName) = os.path.splitext(cemtracerFile)
            if extName != '.log' or fileName.startswith('cemtracer') == False: 
                print 'skip unknown log file: ' + cemtracerFile
                continue
                
            cemOutFile = os.path.join(jobOutSpx, cemtracerFile)
            cemtracerFd = open(cemOutFile, "r")
            while True:
                line = cemtracerFd.readline() 
                if len(line) == 0 : 
                    break; 
                line = re.sub(r'\n', "", line)
                [ret, cemLogInfo] = cem_log_parse(line)
                if ret == True: 
                    jobLogStr   = cemLogInfo[CEMTRACER_LOG]
                    jobLogDateTime  = datetime.datetime.strptime(cemLogInfo[CEMTRACER_DATE], '%d %b %Y %H:%M:%S')
                    jobLogMs    = cemLogInfo[CEMTRACER_MS]
                    jobLogUs    = cemLogInfo[CEMTRACER_US]
                    jobLogecomProcessId = long(cemLogInfo[CEMTRACER_PROCESSID])
                    jobThreadId = long(cemLogInfo[CEMTRACER_THREADID])
                    # Sub-second ordering key.  NOTE(review): units look
                    # inconsistent (ms*1000000 + us/1000) — confirm this
                    # formula is intended; it is only used for ordering.
                    jobLogUsLong = long(jobLogMs)*1000000 + long(jobLogUs)/1000
                    
                    if threadId == jobThreadId and ecomProcessId == jobLogecomProcessId: 
                        if endTime != None: 
                            # Closed window: start <= t <= end (compare
                            # datetime first, break ties with UsLong).
                            if ((startTime < jobLogDateTime) or (startTime == jobLogDateTime and startTimeUs <= jobLogUsLong)) and ((endTime > jobLogDateTime) or (endTime == jobLogDateTime and endTimeUs >= jobLogUsLong)): 
                                uemJobCallGraphLogList.append([jobLogDateTime, jobLogUsLong, line])
                                # print 'TEST: append line'
                                # print 'TEST: ', line
                        elif ((startTime < jobLogDateTime) or (startTime == jobLogDateTime and startTimeUs <= jobLogUsLong)): 
                            # Open-ended window: everything from start on.
                            uemJobCallGraphLogList.append([jobLogDateTime, jobLogUsLong, line])
            cemtracerFd.close()
        return uemJobCallGraphLogList; 
        
    def generate_uemjob_analysis_result_from_callGraphSegment(self, jobIdStr, uemJobCallGraphSegment): 
        """Gather and time-sort all log lines belonging to one call-graph
        segment by walking its processing sub-segments."""
        uemJobCallGraphLogListAll = []
        ecomProcessId   = uemJobCallGraphSegment._ecomProcessId
        index = 0
        processingSegListSize = len(uemJobCallGraphSegment._processingSegList)
        for processingSeg in uemJobCallGraphSegment._processingSegList: 
            threadId    = processingSeg._startThreadId
            startTime   = processingSeg._startTime
            startTimeUs = processingSeg._startTimeUs                        
            endTime     = processingSeg._stopTime
            endTimeUs   = processingSeg._stopTimeUs
            index = index + 1
            # print 'processing Seg ' + str(index) + ', total seg = ' + str(processingSegListSize) +', threadId=' + str(threadId) + ', time=[' +  startTime.strftime('%Y-%m-%d %H:%M:%S') + ' ~ ' + endTime.strftime('%Y-%m-%d %H:%M:%S') + ']'
            
            # last one, check OnThreadExit... extend the window to the
            # OnThreadExit stamp when it falls after this segment's stop.
            if index == processingSegListSize and processingSeg._stopThreadId == uemJobCallGraphSegment._OnThreadExitThreadId: 
                if endTime != None and uemJobCallGraphSegment._OnThreadExitTime != None and (endTime < uemJobCallGraphSegment._OnThreadExitTime or (endTime == uemJobCallGraphSegment._OnThreadExitTime and endTimeUs < uemJobCallGraphSegment._OnThreadExitTimeUs)): 
                    endTime         = uemJobCallGraphSegment._OnThreadExitTime
                    endTimeUs       = uemJobCallGraphSegment._OnThreadExitTimeUs            
            uemJobCallGraphLogList = self.generate_uemjob_analysis_result_from_timeframe_threadid(uemJobCallGraphSegment._spIdStr, uemJobCallGraphSegment._jobIdStr, ecomProcessId, threadId, startTime, startTimeUs, endTime, endTimeUs)
            for uemJobCallGraphLog in uemJobCallGraphLogList: 
                uemJobCallGraphLogLIST = None  # NOTE(review): placeholder removed; see below
                uemJobCallGraphLogListAll.append(uemJobCallGraphLog)
        
        # Sort uemJobCallGraphLogList (Python 2-only cmp-function sort).
        uemJobCallGraphLogListAll.sort(uem_job_call_graph_log_list_sort)
        return uemJobCallGraphLogListAll
        
    def generate_uemjob_analysis_result(self, uemJob): 
        jobIdStr = uemJob._jobIdStr       
        
        """
        lenJobCallGraphList = len(uemJob._uemJobCallGraphSegmentList)
        if lenJobCallGraphList < 1: 
            print 'Warning: Job ' + jobIdStr + ' didn\'t have a full execution log, return and ignore...'
            return
        """
        
        print 'Start generating execution log for job ' + jobIdStr
        callGraphGenerationStartTime = datetime.datetime.now() 
            
        uemJobCallGraphFile = os.path.join(self._resultFolder, jobIdStr + '_AnalysisResult.txt')
        fd = open(uemJobCallGraphFile, "w")
        fd.write('1. This is a summary for job ' + jobIdStr + '\n')
        fd.write(self.get_single_job_summary_str(uemJob))
        fd.write('\n')
        fd.write('2. This is the information from CP DB tables for Uem Job ' + jobIdStr + '\n')
        fd.write(self.get_single_job_task_db_info_str(uemJob._jobIdStr))
        fd.write('\n')
        fd.write('\n')
        fd.write("3. This is execution log for job " + jobIdStr + '\n')
        for uemJobCallGraphSegment in uemJob._uemJobCallGraphSegmentList: 
            uemJobCallGraphLogList = self.generate_uemjob_analysis_result_from_callGraphSegment(jobIdStr, uemJobCallGraphSegment)
            for cemJobCallGraphLog in uemJobCallGraphLogList: 
                fd.write(cemJobCallGraphLog[2] + '\n')
        fd.close()
        self._callGraphFileList.append(uemJobCallGraphFile)
        
        callGraphGenerationEndTime = datetime.datetime.now()
        print 'Generated execution log for job ' + jobIdStr + ' finished, ' + str((callGraphGenerationEndTime-callGraphGenerationStartTime).seconds) + ' seconds elapsed.'
            
    def generate_analysis_result(self): 
        if args.id != None: 
            # if user input a job id, just generate the analysis result for the specific uem job. 
            analysisJobIdStr = args.id
            # print 'We need only for a specific job analysis result: ' + analysisJobIdStr
            if analysisJobIdStr in self._uemJobIdList: 
                print 'Found the job ' + analysisJobIdStr + ', ready to start analysis.'
                uemJob = self._uemJobDict[analysisJobIdStr]
                self.generate_uemjob_analysis_result(uemJob)
            else: 
                print 'Didn\'t find the job ' + analysisJobIdStr
        else: 
            # by default, generate analysis result for abnormal uem jobs: 
            abnormalUemJobSize = len(self._uemJobAbnormalList)        
            index = 0
            for uemJob in self._uemJobAbnormalList: 
                # print str(index) + '. Generating callgraph for job ' + jobIdStr
                index = index + 1            
                self.generate_uemjob_analysis_result(uemJob)            
            
    def get_single_job_task_db_info_str(self, jobIdStr): 
        """
        Build a human-readable report of the DB-table information for one job.

        Looks up jobIdStr in self._uemJobDbInfoDict and formats the job row
        (all columns, with decoded job-state and recovery-policy labels)
        followed by every task row, sorted by task index.  Returns an empty
        string when the job id is not present in the dict.
        """
        retStr = ''
        if jobIdStr in self._uemJobDbInfoDict.keys(): 
            uemJobDbInfo = self._uemJobDbInfoDict[jobIdStr]
            # job str
            retStr += '    Uem Job Information extracted from following DB tables: \n'
            retStr += '      ' + self._job_csv_file + '\n'
            retStr += '      ' + self._task_csv_file + '\n'
            retStr += '\n'
            retStr += '      Uem Job ' + jobIdStr + ' has ' + str(len(uemJobDbInfo._uemJobTaskDbInfoList)) + ' tasks: \n'
            retStr += '\t instanceid                    = ' + uemJobDbInfo._instanceid                               + '\n'
            retStr += '\t application abort flag        = ' + uemJobDbInfo._applicationabortflag                     + '\n'
            retStr += '\t child instance id             = ' + uemJobDbInfo._childinstanceid                          + '\n'
            retStr += '\t client data                   = ' + uemJobDbInfo._clientdata                               + '\n'
            retStr += '\t client provider session name  = ' + uemJobDbInfo._clientprovidersessionname                + '\n'
            retStr += '\t current task index            = ' + uemJobDbInfo._currenttaskindex                         + '\n'
            retStr += '\t current task name             = ' + uemJobDbInfo._currenttaskname                          + '\n'
            retStr += '\t execution error code          = ' + uemJobDbInfo._emcextendederrorcode                     + '\n'
            retStr += '\t execution error description   = ' + uemJobDbInfo._emcextendederrordescription              + '\n'
            retStr += '\t rollback error code           = ' + uemJobDbInfo._emcextendedrollbackerrorcode             + '\n'
            retStr += '\t error task index              = ' + uemJobDbInfo._errortaskindex                           + '\n'
            retStr += '\t estimated weight              = ' + uemJobDbInfo._estimatedweight                          + '\n'
            retStr += '\t hidden                        = ' + uemJobDbInfo._hidden                                   + '\n'
            retStr += '\t attributes                    = ' + uemJobDbInfo._jobattributes                            + '\n'
            # Decode the numeric job state (stored as a string) into a label.
            # NOTE(review): '23768' looks like a typo for '32768' (the DMTF
            # reserved range below is 13-32767, vendor is 32769-65535, leaving
            # exactly 32768 uncovered) — confirm against the .mof files before
            # changing, since start_analyze_uem_jobtask_db_tables uses the
            # same '23768' literal.
            jobstatestr = ''
            try: 
                if uemJobDbInfo._jobstate == '2': jobstatestr = 'New'
                elif uemJobDbInfo._jobstate == '3': jobstatestr = 'Starting'
                elif uemJobDbInfo._jobstate == '4': jobstatestr = 'Running'
                elif uemJobDbInfo._jobstate == '5': jobstatestr = 'Suspended'
                elif uemJobDbInfo._jobstate == '6': jobstatestr = 'Shutting Down'
                elif uemJobDbInfo._jobstate == '7': jobstatestr = 'Completed'
                elif uemJobDbInfo._jobstate == '8': jobstatestr = 'Terminated'
                elif uemJobDbInfo._jobstate == '9': jobstatestr = 'Killed'
                elif uemJobDbInfo._jobstate == '10': jobstatestr = 'Exception'
                elif uemJobDbInfo._jobstate == '11': jobstatestr = 'Service'
                elif uemJobDbInfo._jobstate == '12': jobstatestr = 'Query Pending'
                elif uemJobDbInfo._jobstate == '23768': jobstatestr = 'Rolling Back'
                elif long(uemJobDbInfo._jobstate) >= 13 and long(uemJobDbInfo._jobstate) <= 32767: jobstatestr = 'DMTF Reserved'
                elif long(uemJobDbInfo._jobstate) >= 32769 and long(uemJobDbInfo._jobstate) <= 65535: jobstatestr = 'Vendor Reserved'
                else: jobstatestr = 'unknown job state, check from file EMC_UEM_TransactionJob.mof or EMC_UEM_ConcreteJob.mof'
            except: 
                # long() raises when jobstate is not numeric.
                jobstatestr = 'unknown job state'
                print 'Warning, could not parse jobstate: ', uemJobDbInfo._jobstate
            retStr += '\t job state                     = ' + uemJobDbInfo._jobstate + ' [' + jobstatestr                                + ']\n'
            retStr += '\t job sub state                 = ' + uemJobDbInfo._jobsubstate                              + '\n'
            retStr += '\t job name                      = ' + uemJobDbInfo._name                                     + '\n'
            retStr += '\t num of tasks                  = ' + uemJobDbInfo._numberoftasks                            + '\n'
            retStr += '\t original priority             = ' + uemJobDbInfo._originalpriority                         + '\n'
            retStr += '\t owner                         = ' + uemJobDbInfo._owner                                    + '\n'
            retStr += '\t parent instance id            = ' + uemJobDbInfo._parentinstanceid                         + '\n'
            retStr += '\t percent complete              = ' + uemJobDbInfo._percentcomplete                          + '\n'
            retStr += '\t pre suspend run direction     = ' + uemJobDbInfo._presuspendrundirection                   + '\n'
            retStr += '\t priority                      = ' + uemJobDbInfo._priority                                 + '\n'
            retStr += '\t recovery mode                 = ' + uemJobDbInfo._recoverymode                             + '\n'
            # Decode the job-level recovery policy code into a label.
            recoverypolicystr = ''
            if uemJobDbInfo._recoverypolicy == '0': recoverypolicystr = 'Continue Job'
            elif uemJobDbInfo._recoverypolicy == '1': recoverypolicystr = 'Rollback Job'
            elif uemJobDbInfo._recoverypolicy == '2': recoverypolicystr = 'Suspend Job'
            elif uemJobDbInfo._recoverypolicy == '3': recoverypolicystr = 'Stop Job'
            elif uemJobDbInfo._recoverypolicy == '4': recoverypolicystr = 'Task Defined'
            else: recoverypolicystr = 'Unknown Policy, check from file EMC_UEM_TransactionJob.mof'
            retStr += '\t job recovery policy           = ' + uemJobDbInfo._recoverypolicy + ' [' + recoverypolicystr                           + ']\n'
            retStr += '\t request id                    = ' + uemJobDbInfo._requestid                                + '\n'
            retStr += '\t start time                    = ' + uemJobDbInfo._starttime                                + '\n'
            retStr += '\t time of last state change     = ' + uemJobDbInfo._timeoflaststatechange                    + '\n'
            retStr += '\t time of submit                = ' + uemJobDbInfo._timesubmitted                            + '\n'
            retStr += '\t user id                       = ' + uemJobDbInfo._userid                                   + '\n'

        
            # sort tasks numerically by task index (Python 2 cmp-style sort)
            uemJobDbInfo._uemJobTaskDbInfoList.sort(lambda x,y:cmp(int(x._taskindex), int(y._taskindex)))
            
            # display tasks.
            uemJobTaskDbInfoListSize = len(uemJobDbInfo._uemJobTaskDbInfoList)
            uemJobTaskDbInfoIndex = 0
            for uemJobTaskDbInfo in uemJobDbInfo._uemJobTaskDbInfoList: 
                uemJobTaskDbInfoIndex = uemJobTaskDbInfoIndex + 1
                retStr += '\t [' + str(uemJobTaskDbInfoIndex) + '/' + str(uemJobTaskDbInfoListSize) + '] task\n'
                retStr += '\t\t task index              = ' + uemJobTaskDbInfo._taskindex        + '\n'
                retStr += '\t\t task group id           = ' + uemJobTaskDbInfo._groupid          + '\n'
                retStr += '\t\t task name               = ' + uemJobTaskDbInfo._taskname         + '\n'
                retStr += '\t\t task identifier         = ' + uemJobTaskDbInfo._taskidentifier   + '\n'
                retStr += '\t\t task weight             = ' + uemJobTaskDbInfo._taskweight       + '\n'
                retStr += '\t\t percent complete        = ' + uemJobTaskDbInfo._percentcomplete  + '\n'
                # Decode the per-task recovery policy code.
                taskRecoverPolicyStr = ''
                if uemJobTaskDbInfo._recoverypolicy == '0': taskRecoverPolicyStr = 'Re-run'
                elif uemJobTaskDbInfo._recoverypolicy == '1': taskRecoverPolicyStr = 'Continue With Next Task'
                elif uemJobTaskDbInfo._recoverypolicy == '2': taskRecoverPolicyStr = 'Do Not Continue'
                elif uemJobTaskDbInfo._recoverypolicy == '3': taskRecoverPolicyStr = 'Rollback'
                elif uemJobTaskDbInfo._recoverypolicy == '4': taskRecoverPolicyStr = 'Suspend'
                elif uemJobTaskDbInfo._recoverypolicy == '5': taskRecoverPolicyStr = 'Retry On Restore/Restart'
                elif uemJobTaskDbInfo._recoverypolicy == '6': taskRecoverPolicyStr = 'Continue/Retry On Restart'
                elif uemJobTaskDbInfo._recoverypolicy == '7': taskRecoverPolicyStr = 'ContinueOnRestart Or RollbackOnFailure'
                else: taskRecoverPolicyStr = 'unknown tsak recovery policy, check from file EMC_UEM_Task.mof'
                retStr += '\t\t task recovery policy    = ' + uemJobTaskDbInfo._recoverypolicy + ' [' + taskRecoverPolicyStr + ']\n'
                # Decode the per-task rollback policy code.
                taskRollbackPolicyStr = ''
                if uemJobTaskDbInfo._rollbackpolicy == '0': taskRollbackPolicyStr = 'Continue'
                elif uemJobTaskDbInfo._rollbackpolicy == '1': taskRollbackPolicyStr = 'Suspend'
                elif uemJobTaskDbInfo._rollbackpolicy == '2': taskRollbackPolicyStr = 'Re-run'
                else: taskRollbackPolicyStr = 'unknown task rollback policy, check from file EMC_UEM_Task.mof'
                retStr += '\t\t task rollback policy    = ' + uemJobTaskDbInfo._rollbackpolicy + ' [' + taskRollbackPolicyStr + ']\n'
        return retStr
        
    def get_single_job_summary_str(self, uemJob): 
        """
        Build the multi-line summary string for one UEM job.

        The summary includes the job header (name, submit SP/time/thread,
        task list), the scalable flag, de-duplicated job-state and per-task
        status transition chains, every execution segment (SP, PID, TID,
        time frame, duration), execution errors, and callbacks.
        """
        retStr = '  Job [' + uemJob._jobIdStr + ']\n' + \
                 '    Job Name                          = ' + uemJob._jobNameStr + '\n' + \
                 '    Submit SP ID                      = ' + uemJob._jobSubmitSpIdStr + '\n' + \
                 '    Submit Time                       = ' + uemJob._jobSubmitTime.strftime('%Y-%m-%d %H:%M:%S') + '\n' + \
                 '    Submit Thread ID                  = ' + str(uemJob._jobSubmitThreadId) + '\n' + \
                 '    Number of Tasks                   = ' + str(uemJob._jobNumOfTasks) + "\n"
        for taskIndex in range(0, uemJob._jobNumOfTasks): 
            retStr = retStr + '                                        Task #' + str(taskIndex) + ", name = " + uemJob._jobTasks[taskIndex] + '\n'
            
        if uemJob._jobScalableFlag == True: 
            retStr = retStr + \
                 '    Scalable Job                      = Yes.\n'
        else: 
            retStr = retStr + \
                 '    Scalable Job                      = No. \n'
                 
        # Job state transition chain: consecutive duplicate states (element
        # [4] of each entry) are collapsed, separated by ' ==> '.
        if len(uemJob._jobStateTransitionList) > 0:          
            retStr = retStr + \
                     '    Job State Transition Histroy      = ['
            lastUemJobState = ''
            jobStateTransactionStart = False
            for jobState in uemJob._jobStateTransitionList:
                if (jobState[4] != lastUemJobState): 
                    lastUemJobState = jobState[4]
                    if jobStateTransactionStart == False: 
                        jobStateTransactionStart = True
                    else: 
                        retStr = retStr + ' ==> '
                    retStr = retStr + jobState[4]
            retStr = retStr + ']\n'
        
        # Per-task status transitions: entries are [taskNo, fromState,
        # toState, ...]; a new task number starts a new bracketed chain.
        if len(uemJob._jobTaskStatusTransitionList) > 0: 
            lastUemJobTaskNo = -1
            lastUemJobTaskState = ''
            for jobTask in uemJob._jobTaskStatusTransitionList: 
                    
                if (lastUemJobTaskNo != jobTask[0]): 
                    if lastUemJobTaskNo != -1: 
                        retStr = retStr + ']\n'
                    retStr = retStr + '    Task #' + str(jobTask[0]) + ' Status Transition         = ['
                    retStr = retStr + jobTask [1] + ' ==> ' + jobTask [2]
                    lastUemJobTaskNo = jobTask[0]
                    lastUemJobTaskState = jobTask[2]
                else: 
                    if lastUemJobTaskState != jobTask[1]: 
                        retStr = retStr + ' ==> ' + jobTask [1]
                        lastUemJobTaskState = jobTask [1]
                    if lastUemJobTaskState != jobTask [2]: 
                        retStr = retStr + ' ==> ' + jobTask [2]
                        lastUemJobTaskState = jobTask [2]
                
            retStr = retStr + ']\n'
            
        # Execution segments: one entry per call-graph segment of the job.
        lenJobCallGraphList = len(uemJob._uemJobCallGraphSegmentList)    
        index = 0
        for uemJobCallGraphSegment in uemJob._uemJobCallGraphSegmentList: 
            retStr = retStr + '    Job Execution Segment #' + str(index) + ':\n'
            index = index + 1
            
            retStr = retStr + '    Job Execution SP ID               = ' + uemJobCallGraphSegment._spIdStr + '\n'
            retStr = retStr + '    Job Execution ECOM PID            = ' + str(uemJobCallGraphSegment._ecomProcessId) + '\n'
            retStr = retStr + '    Job Execution Thread ID           = ' + str(uemJobCallGraphSegment._OnThreadStartThreadId) + '\n'
            if uemJobCallGraphSegment._OnThreadExitTimeUs == None: 
                # No OnThreadExit seen: the segment never finished in the logs.
                retStr = retStr + '    Job Execution Time Frame          = [ ' + str(uemJobCallGraphSegment._OnThreadStartTime) + ' ~ Not Finished...]\n'
                retStr = retStr + '    Job Execution Duration            = Not Finished...\n'
            else: 
                # Duration from the microsecond counters: whole seconds plus
                # the leftover expressed in milliseconds.
                jobExeSec = (uemJobCallGraphSegment._OnThreadExitTimeUs - uemJobCallGraphSegment._OnThreadStartTimeUs)/1000/1000
                jobExeUs = (uemJobCallGraphSegment._OnThreadExitTimeUs - uemJobCallGraphSegment._OnThreadStartTimeUs)/1000 - jobExeSec*1000
                retStr = retStr + \
                                  '    Job Execution Time Frame          = [ ' + str(uemJobCallGraphSegment._OnThreadStartTime) + ' ~ ' + str(uemJobCallGraphSegment._OnThreadExitTime) + ' ]\n'
                retStr = retStr + \
                                  '    Job Execution Duration            = ' + str(jobExeSec) + '.' + str(jobExeUs) + ' Seconds.' + '\n'
            
            
            
        # uemJob._executionErrorList.append([jobThreadId, jobLogDateTime, jobLogUsLong, taskIndexStr, taskNameStr, executionErrorStr, jobLogStr])
        retStr = retStr + '\n'
        if len(uemJob._executionErrorList) > 0: 
            for executionError in uemJob._executionErrorList: 
                retStr = retStr + \
                     '    Execution Error of Task           = #' + executionError[3] + '\n' + \
                     '    Execution Error Task Name         = ' + executionError[4] + '\n' + \
                     '    Execution Error Time              = ' + executionError[1].strftime('%Y-%m-%d %H:%M:%S') + '\n'
                retStr = retStr + \
                     '    Execution Error Log               = \n        ' + executionError[6] + '\n'
        
        # uemJob._callbackList.append([jobThreadId, jobLogDateTime, jobLogUsLong, callbackName, jobLogStr])
        # NOTE(review): the comment above shows only 5 elements, but the code
        # below reads callback[5] and callback[6] — the comment appears stale;
        # verify against the code that fills _callbackList.
        retStr = retStr + '\n'
        
        if len(uemJob._callbackList) > 0: 
            retStr = retStr + \
                     '    Callback List                     = \n'
            for callback in uemJob._callbackList: 
                retStr = retStr + '         ' + callback[6] + ', PID=' + str(callback[5]) + ', TID=' + str(callback[0]) + ', ' + callback[1].strftime('%Y-%m-%d %H:%M:%S') + ', ' + callback[4] + '\n'
            
        return retStr
        
        
    ###################################        
    # Generate Job Analysis Summary : 
    ###################################        
    ###################################        
    # Generate Job Analysis Summary : 
    ###################################        
    def generate_job_analysis_summary(self): 
        """
        Write the overall 'summary' file into the result folder (and echo each
        section to stdout).

        Sections: (1) all UEM jobs found in the log files, with sub-lists for
        scalable jobs, jobs running across ECOM restarts, batch jobs, and
        abnormal jobs (including a full per-job summary for each abnormal
        job); (2) job/task counts and per-state job lists from the DB tables.
        """
        self._summaryFile = os.path.join(self._resultFolder, 'summary')
        fd = open(self._summaryFile, "w")
        fd.write("Job Analysis Summary: " + '\n')
        
        # 1. all uem jobs from log files: 
        tmpstr = '1. Found ' + str(len(self._uemJobIdList)) + ' UEM Jobs from the following log files:'
        if self._jobOutSpa != None: 
            tmpstr += '\n   ' + os.path.join(self._jobOutSpa, 'cemtracer_uemjobsvc.log')
        if self._jobOutSpb != None: 
            tmpstr += '\n   ' + os.path.join(self._jobOutSpb, 'cemtracer_uemjobsvc.log')
            
        print tmpstr
        fd.write(tmpstr + '\n')
        
        # 1.1 scalable jobs: ids joined with ', ', wrapped every 16 entries.
        scalableUemJobSize = len(self._uemJobScalableList)
        scalableUemJobIdListStr = '\t'
        index = 0
        for uemJob in self._uemJobScalableList:
            scalableUemJobIdListStr += uemJob._jobIdStr
            if (index + 1) % 16 == 0: 
                scalableUemJobIdListStr += '\n\t'
            elif index < scalableUemJobSize - 1: 
                scalableUemJobIdListStr += ', '
            index = index + 1
        if scalableUemJobSize > 0: 
            scalableUemJobIdListStr = '   ' + str(scalableUemJobSize) + ' scalable UEM Jobs: \n' + scalableUemJobIdListStr
            print scalableUemJobIdListStr
            fd.write(scalableUemJobIdListStr + '\n')
        
        # 1.2 running job with ECOM restarts
        # NOTE(review): unlike sections 1.1 and 1.5, this loop appends ', '
        # after every id including the last, leaving a trailing comma —
        # probably an oversight; confirm before changing the output format.
        runningJobWithEcomRestartList = []
        runningJobIdListStr = '\t'
        index = 0
        for jobIdStr in self._uemJobIdList: 
            uemJob = self._uemJobDict[jobIdStr]
            if uemJob._runningWithECOMRestarts == True: 
                runningJobWithEcomRestartList.append(uemJob)
                runningJobIdListStr += uemJob._jobIdStr
                if (index +1) % 16 == 0: 
                    runningJobIdListStr += '\n\t'
                else: runningJobIdListStr += ', '
                index = index + 1

        if len(runningJobWithEcomRestartList) > 0: 
            runningJobIdListStr = '   ' + str(len(runningJobWithEcomRestartList)) + ' running UEM Jobs when ECOM restarts. \n' + runningJobIdListStr + '\n'
            print runningJobIdListStr
            fd.write(runningJobIdListStr + '\n')
        
        # 1.3 batch jobs: 
        batchJobListSize = len(self._batchJobList)
        if batchJobListSize > 0: 
            batchJobStr = "   " + str(batchJobListSize) + " Batch Jobs from log files.\n"
            print batchJobStr
            fd.write(batchJobStr)
            if (len(self._batchJobList) > 0): 
                for batchJob in self._batchJobList: 
                    fd.write('\tBatch Job ID = ' + batchJob + ' has UEM Jobs = ' + str(self._batchJobDict[batchJob]) + '\n')
            fd.write('\n')
        
        # 1.5 abnormal uem jobs: id(finalState) pairs, wrapped every 8 entries.
        abnormalUemJobSize = len(self._uemJobAbnormalList)
        abnormalJobIdListStr = '\t'
        index = 0
        for uemJob in self._uemJobAbnormalList: 
            abnormalJobIdListStr += uemJob._jobIdStr + '(' + uemJob._jobFinalState + ')'
            if (index + 1) % 8 == 0: 
                abnormalJobIdListStr += '\n\t'
            elif index < abnormalUemJobSize - 1: 
                abnormalJobIdListStr += ', '
            index = index + 1
        
        if abnormalUemJobSize > 0: 
            abnormalJobIdListStr = "   " + str(abnormalUemJobSize) + " abnormal UEM Jobs: \n" + abnormalJobIdListStr
            print abnormalJobIdListStr
            fd.write(abnormalJobIdListStr + '\n')
        fd.write('\n')
        
        # generate single job summary (one '#total-index:' entry per abnormal job)
        index = 1
        for uemJob in self._uemJobAbnormalList: 
            fd.write('   #' + str(abnormalUemJobSize) + '-' + str(index) + ': ')
            fd.write(self.get_single_job_summary_str(uemJob))
            fd.write('\n')
            fd.write(self.get_single_job_task_db_info_str(uemJob._jobIdStr))
            fd.write('\n')
            fd.write('\n')
            index = index + 1            
        fd.write('\n')
        
        # 2. uem job/task DB information: 
        print 
        fd.write('\n')
        jobTaskDbInfoStr = '2. Found ' + str(self._numOfJobs) + ' Uem Jobs from DB Table: ' + self._job_csv_file + '\n'
        jobTaskDbInfoStr += '   Found ' + str(self._numOfTasks) + ' Uem Job Tasks from DB Table: ' + self._task_csv_file + '\n'

        # Append one section per non-empty job-state bucket.
        if len(self._new_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('new jobs', self._new_job_list)
        if len(self._starting_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('starting jobs', self._starting_job_list)
        if len(self._running_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('running jobs', self._running_job_list)
        if len(self._suspended_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('suspended jobs', self._suspended_job_list)
        if len(self._completed_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('completed jobs', self._completed_job_list)
        if len(self._terminated_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('terminated jobs', self._terminated_job_list)
        if len(self._killed_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('killed jobs', self._killed_job_list)
        if len(self._exception_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('exception jobs', self._exception_job_list)
        if len(self._rollingback_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('rolling back jobs', self._rollingback_job_list)
        if len(self._unknown_state_job_list) > 0: 
            jobTaskDbInfoStr += self.get_job_list_str('unkonwn state jobs', self._unknown_state_job_list)
        print jobTaskDbInfoStr
        fd.write(jobTaskDbInfoStr + '\n')
        
        # close summary file fd.
        fd.close()

    def display_jobtask_db_summary(self):     
        # display job/task related tables: 
        print 'Found ', self._numOfJobTaskRelatedTables, 'Uem Job/Task Related Tables: '
        for f in self._jobRelatedTableFiles: print '\t', f
        for f in self._jobTaskRelatedTableFiles: print '\t', f
        for f in self._jobRelatedArrayTableFiles: print '\t', f
        for f in self._jobTaskRelatedArrayTableFiles: print '\t', f
        for f in self._jobAssocTableFiles: print '\t', f
        for f in self._jobTaskAssocTableFiles: print '\t', f
        print 
        
        print 'Analysis Summary from the following Uem Job/Task DB Tables: '
        print 'Uem Job DB Table             :', self._job_csv_file
        print 'Uem Job Task DB Table        :', self._task_csv_file
        print 'Uem Job Count from DB        :', self._numOfJobs
        print 'Uem Job Task Count from DB   :', self._numOfTasks
        
        print self.get_job_list_str('new jobs', self._new_job_list)
        print self.get_job_list_str('starting jobs', self._starting_job_list)
        print self.get_job_list_str('running jobs', self._running_job_list)
        print self.get_job_list_str('suspended jobs', self._suspended_job_list)
        print self.get_job_list_str('completed jobs', self._completed_job_list)
        print self.get_job_list_str('terminated jobs', self._terminated_job_list)
        print self.get_job_list_str('killed jobs', self._killed_job_list)
        print self.get_job_list_str('exception jobs', self._exception_job_list)
        print self.get_job_list_str('rolling back jobs', self._rollingback_job_list)
        if len(self._unknown_state_job_list) > 0: 
            print self.get_job_list_str('unkonwn job state jobs', self._unknown_state_job_list)
            
    def start_analyze_uem_jobtask_db_tables(self): 
        # get job/task related tables;         
        jobRelatedTableFiles            = fnmatch.filter(os.listdir(self._db_reports_folder), 'emc_uem_transactionjob*.csv')        
        jobTaskRelatedTableFiles        = fnmatch.filter(os.listdir(self._db_reports_folder), 'emc_uem_persistenttaskcontainer*.csv')
        for f in jobRelatedTableFiles: 
            self._jobRelatedTableFiles.append(os.path.join(self._db_reports_folder, f))
        for f in jobTaskRelatedTableFiles: 
            self._jobTaskRelatedTableFiles.append(os.path.join(self._db_reports_folder, f))
        # get job/task related array tables; 
        arrayFolder = os.path.join(self._db_reports_folder, 'arrays')
        jobRelatedArrayTableFiles       = fnmatch.filter(os.listdir(arrayFolder), 'emc_uem_transactionjob*.csv')
        jobTaskRelatedArrayTableFiles   = fnmatch.filter(os.listdir(arrayFolder), 'emc_uem_persistenttaskcontainer*.csv')
        for f in jobRelatedArrayTableFiles: 
            self._jobRelatedArrayTableFiles.append(os.path.join(arrayFolder, f))
        for f in jobTaskRelatedArrayTableFiles: 
            self._jobTaskRelatedArrayTableFiles.append(os.path.join(arrayFolder, f))
        # get job/task related assoc tables; 
        assocFolder = os.path.join(self._db_reports_folder, 'associations')
        jobAssocTableFiles              = fnmatch.filter(os.listdir(assocFolder), 'emc_uem_transactionjob*.csv')
        jobTaskAssocTableFiles          = fnmatch.filter(os.listdir(assocFolder), 'emc_uem_persistenttaskcontainer*.csv')
        for f in jobAssocTableFiles: 
            self._jobAssocTableFiles.append(os.path.join(assocFolder, f))
        for f in jobTaskAssocTableFiles: 
            self._jobTaskAssocTableFiles.append(os.path.join(assocFolder, f))
        self._numOfJobTaskRelatedTables = len(self._jobRelatedTableFiles) + len(self._jobTaskRelatedTableFiles) + len(self._jobRelatedArrayTableFiles) + len(self._jobTaskRelatedArrayTableFiles) + len(self._jobAssocTableFiles) + len(self._jobTaskAssocTableFiles)
        
        
        
        with open(self._job_csv_file) as f: 
            reader = csv.DictReader(f)

            for row in reader: 
                uemJobDbInfo = UemJobDbInfo(         
                        row['applicationabortflag']	       ,
                        row['childinstanceid']	           ,
                        row['clientdata']	               ,
                        row['clientprovidersessionname']	,
                        row['currenttaskindex']	           ,
                        row['currenttaskname']	           ,
                        row['emcextendederrorcode']	       ,
                        row['emcextendederrordescription'] ,
                        row['emcextendedrollbackerrorcode'],
                        row['errortaskindex']	           ,
                        row['estimatedweight']	           ,
                        row['hidden']	                   ,
                        row['instanceid']	               ,
                        row['jobattributes']	           ,
                        row['jobstate']	                   ,
                        row['jobsubstate']	               ,
                        row['name']	                       ,
                        row['numberoftasks']	           ,
                        row['originalpriority']	           ,
                        row['owner']	                   ,
                        row['parentinstanceid']	           ,
                        row['percentcomplete']	           ,
                        row['presuspendrundirection']	   ,
                        row['priority']	                   ,
                        row['recoverymode']	               ,
                        row['recoverypolicy']	           ,
                        row['requestid']                   ,
                        row['starttime']                   ,
                        row['timeoflaststatechange']       ,
                        row['timesubmitted']               ,
                        row['userid']                              
                )
                # uemJobDbInfo.display_job_info()
                self._uemJobDbInfoDict[uemJobDbInfo._instanceid] = uemJobDbInfo
                self._uemJobDbIdList.append(uemJobDbInfo._instanceid)
        
                if uemJobDbInfo._jobstate == '2': 
                    self._new_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '3': 
                    self._starting_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '4': 
                    self._running_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '5': 
                    self._suspended_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '7': 
                    self._completed_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '8': 
                    self._terminated_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '9':
                    self._killed_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '10':
                    self._exception_job_list.append(uemJobDbInfo)
                elif uemJobDbInfo._jobstate == '23768':
                    self._rollingback_job_list.append(uemJobDbInfo)
                else: 
                    print uemJobDbInfo._instanceid, 'unknown job state =', uemJobDbInfo._jobstate, 'check the new state (since 7/21/2016) from JobState property in EMC_UEM_TransactionJobLeaf.mof file.'
                    self._unknown_state_job_list.append(uemJobDbInfo)
        
        with open(self._task_csv_file) as csvfile: 
            reader = csv.DictReader(csvfile)
            numOfTasks = 0
            for row in reader: 
                numOfTasks = numOfTasks + 1
                uemJobTaskDbInfo = UemJobTaskDbInfo(
                         row['groupid'],
                         row['jobinstanceid'],
                         row['percentcomplete'],
                         row['recoverypolicy'],
                         row['rollbackpolicy'],
                         row['taskidentifier'],
                         row['taskindex'],
                         row['taskname'],
                         row['taskweight'])
        #        uemJobTaskDbInfo.display_uemjobtaskdbinfo()
                uemJobDbInfo = self._uemJobDbInfoDict[uemJobTaskDbInfo._jobinstanceid]
                if uemJobDbInfo != None: 
                    uemJobDbInfo._uemJobTaskDbInfoList.append(uemJobTaskDbInfo)
                    
        self._numOfJobs = len(self._uemJobDbInfoDict)
        self._numOfTasks = numOfTasks
        
    def start_analyze(self, uemJobLogFile, spIdStr): 
        """
        Parse one SP's cemtracer UEM job service log and build per-job records.

        Scans uemJobLogFile line by line with cem_log_parse(), recognizes the
        various job-lifecycle log messages (submit, task list, task status
        changes, OnThreadStart/OnThreadExit, execution start/stop, scalable
        "returned pending" marker, reschedule, execution errors, callbacks,
        and indications), and accumulates them into local UemJob objects.
        Finally merges the local results into self._uemJobDict /
        self._uemJobIdList, combining with data already parsed from the
        peer SP's log when the same job id was seen there.

        uemJobLogFile -- path to a cemtracer_uemjobsvc.log file
        spIdStr       -- 'spa' or 'spb'; tags which SP produced each event
        """
        fd = file(uemJobLogFile, 'r')
        
        # Jobs discovered in THIS log file only; merged into self at the end.
        uemJobDict = {}
        uemJobIdList = []
        
        lineCount = 0
        # True while we are between the 'Post_startup JobServiceStartup::run'
        # log and its matching '... exit' log: jobs (re)initialized in that
        # window were running across an ECOM restart.
        postStartingFlag = False
        while True:
            line = fd.readline() 
            if len(line) == 0 : 
                # print 'log file ' + uemJobLogFile + " has " + str(lineCount) + " lines."
                break; 
            # strip the trailing newline before matching
            line = re.sub(r'\n', "", line)
            
            [ret, cemLogInfo] = cem_log_parse(line)
            if ret == True: 
                jobLogStr   = cemLogInfo[CEMTRACER_LOG]
                jobLogDateTime  = datetime.datetime.strptime(cemLogInfo[CEMTRACER_DATE], '%d %b %Y %H:%M:%S')
                jobLogMs    = cemLogInfo[CEMTRACER_MS]
                jobLogUs    = cemLogInfo[CEMTRACER_US]
                ecomProcessId = long(cemLogInfo[CEMTRACER_PROCESSID])
                jobThreadId = long(cemLogInfo[CEMTRACER_THREADID])
                # NOTE(review): ms*1000000 + us/1000 looks inconsistent with a
                # "microseconds" name (expected ms*1000 + us?) — used only as a
                # monotonic sort key, so relative ordering still works; confirm units.
                jobLogUsLong = long(jobLogMs)*1000000 + long(jobLogUs)/1000
                
                # print 'found log: ' + jobLogStr
                # Track the ECOM post-startup window (exit check first so a
                # line can never satisfy both).
                if jobLogStr == 'Post_startup JobServiceStartup::run exit': 
                    # print str(jobLogDateTime) + ': ' + jobLogStr
                    postStartingFlag = False
                if jobLogStr == 'Post_startup JobServiceStartup::run': 
                    # print str(jobLogDateTime) + ': ' + jobLogStr
                    postStartingFlag = True
                
                # submit
                # (1) init job N-2 start,jobName=job.snapshotschedule.job.CreateSnapshotSchedule,jobState=New,numOfTasks=1,CurrentTaskIndex=0
                # (2) init job N-50 start,jobName=job.uisconfig.job.DeleteUser,jobState=New,numOfTasks=1,requestID=#RestBatchRequest_JEID-16_StepN-userStep6,CurrentTaskIndex=0
                # Form (1): submit line WITHOUT a requestID field.
                s = re.search(r'init job (?P<jobIdStr>.*) start,jobName=(?P<jobNameStr>.*),jobState=(?P<jobStateStr>New|Running),numOfTasks=(?P<numOfTasks>\d+),CurrentTaskIndex=(?P<CurrentTaskIndex>\d+)', jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    jobNameStr = s.group("jobNameStr")
                    jobStateStr = s.group("jobStateStr")
                    numOfTasks = int(s.group("numOfTasks"))
                    CurrentTaskIndex = s.group("CurrentTaskIndex")
                    
                    # if jobStateStr == 'Running': 
                    #     print 'init job = ' + jobIdStr + ', name = ' + jobNameStr + ', numOfTasks = ' + str(numOfTasks) + ', CurrentTaskIndex = ' + CurrentTaskIndex
                    
                    # Submit metadata is only recorded the first time a job id
                    # is seen; later init lines just add a state transition.
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    else: 
                        uemJob = UemJob(jobIdStr)
                        uemJobDict[jobIdStr] = uemJob
                        uemJobIdList.append(jobIdStr)
                        uemJob._jobIdStr = jobIdStr
                        uemJob._jobNumOfTasks = numOfTasks
                        uemJob._jobNameStr = jobNameStr
                        uemJob._jobSubmitSpIdStr = spIdStr
                        uemJob._jobSubmitTime = jobLogDateTime
                        uemJob._jobSubmitTimeUs = jobLogUsLong
                        uemJob._jobSubmitThreadId = jobThreadId

                    uemJob._jobStateTransitionList.append([spIdStr, jobLogDateTime, jobLogUsLong, jobIdStr, jobStateStr])
                    
                    if postStartingFlag == True: 
                        # job was running with ECOM restarts...
                        uemJob._runningWithECOMRestarts = True
                else: 
                    # Form (2): submit line WITH a requestID field (batch jobs).
                    s = re.search(r'init job (?P<jobIdStr>.*) start,jobName=(?P<jobNameStr>.*),jobState=(?P<jobStateStr>New|Running),numOfTasks=(?P<numOfTasks>\d+),requestID=(?P<requestID>.*),CurrentTaskIndex=(?P<CurrentTaskIndex>\d+)', jobLogStr)
                    if s: 
                        jobIdStr = s.group("jobIdStr")
                        jobNameStr = s.group("jobNameStr")
                        jobStateStr = s.group("jobStateStr")
                        numOfTasks = int(s.group("numOfTasks"))
                        CurrentTaskIndex = s.group("CurrentTaskIndex")
                        requestID = s.group("requestID")
                        
                        # A '#RestBatchRequest_JEID-<n>_...' requestID links this
                        # job to REST batch job 'B-<n>'.
                        BatchJobIdStr = ""
                        s = re.search(r'#RestBatchRequest_JEID-(\d+)_.*', requestID)
                        if s: 
                            BatchJobId = s.group(1)
                            BatchJobIdStr = 'B-' + str(BatchJobId)
                        
                        # if jobStateStr == 'Running': 
                        #     print 'New job = ' + jobIdStr + ', name = ' + jobNameStr + ', numOfTasks = ' + str(numOfTasks) + ', CurrentTaskIndex = ' + CurrentTaskIndex + ', requestID = ' + requestID + ', batch job Id = ' + BatchJobIdStr
                           
                        if jobIdStr in uemJobDict: 
                            uemJob = uemJobDict[jobIdStr]
                        else: 
                            uemJob = UemJob(jobIdStr)
                            uemJobDict[jobIdStr] = uemJob
                            uemJobIdList.append(jobIdStr)                            
                            uemJob._jobIdStr = jobIdStr
                            uemJob._jobNumOfTasks = numOfTasks
                            uemJob._jobNameStr = jobNameStr
                            uemJob._jobBatchJobIdStr = BatchJobIdStr
                            uemJob._jobSubmitSpIdStr = spIdStr
                            uemJob._jobSubmitTime = jobLogDateTime
                            uemJob._jobSubmitTimeUs = jobLogUsLong
                            uemJob._jobSubmitThreadId = jobThreadId
                        uemJob._jobStateTransitionList.append([spIdStr, jobLogDateTime, jobLogUsLong, jobIdStr, jobStateStr])
                        if postStartingFlag == True: 
                            # job was running with ECOM restarts...
                            uemJob._runningWithECOMRestarts = True
    
                    
                # init job N-7, #0 task name = EMC_UEM_Task_CreateMappedStoragePoolLeaf_1
                # init job N-7, #1 task name = EMC_UEM_Task_EnableFASTVPScheduleLeaf_2
                # get task list...
                taskMarker = re.compile(r"^init job (?P<jobIdStr>.*), #(?P<taskNo>\d+) task name = (?P<taskName>.*)")
                s = taskMarker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    taskNo = int(s.group("taskNo"))
                    taskName = s.group("taskName")
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                        uemJob._jobTasks[taskNo] = taskName
                
                # Update for Job [N-7] Task#0, from old status [NotStarted] to new status [Running]
                # Update for Job [N-7] Task#0, from old status [Running] to new status [Completed]
                # get task status changes...
                taskStatusMarker = re.compile(r"Update for Job \[(?P<jobIdStr>.*)\] Task#(?P<taskNo>\d+), from old status \[(?P<oldStatus>\w+)\] to new status \[(?P<newStatus>\w+)\]")
                s = taskStatusMarker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    taskNo = int(s.group("taskNo"))
                    oldStatus = s.group("oldStatus")
                    newStatus = s.group("newStatus")
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                        uemJob._jobTaskStatusTransitionList.append([taskNo, oldStatus, newStatus, jobLogDateTime, jobLogUsLong])
                
                
                
                
                
                
                
                
                
                
                
                
                
                
                
                
                # OnThreadStart
                # Invoke EMC_UEM_SnapshotScheduleJobContextDataLeaf::OnThreadStart for job[N-1]
                s = re.search(r'^Invoke (?P<ClassName>.*)::OnThreadStart for job\[(?P<jobIdStr>.*)\]', jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    className = s.group("ClassName")
                    # print spIdStr + ', job=' + jobIdStr + ', time=' + str(jobLogDateTime) + ', Invoked Callback=' + className + '::OnThreadStart '
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                        uemJob._jobOnThreadStartTime.append(jobLogDateTime)
                        uemJob._jobOnThreadStartTimeUs.append(jobLogUsLong)
                        uemJob._jobOnThreadStartThreadId.append(jobThreadId)
                        
                        # job processing call graph start, key is 'spId_ECOMprocessId'
                        keyStr = spIdStr + '_' + str(ecomProcessId)
                        setFlag = False
                        for uemJobCallGraphSegment in uemJob._uemJobCallGraphSegmentList: 
                            if spIdStr == uemJobCallGraphSegment._spIdStr and ecomProcessId == uemJobCallGraphSegment._ecomProcessId: 
                                uemJobCallGraphSegment._OnThreadStartTime = jobLogDateTime
                                uemJobCallGraphSegment._OnThreadStartTimeUs = jobLogUsLong
                                setFlag = True
                        if setFlag == False: 
                            # An OnThreadStart must follow an 'Executing job'
                            # line (which creates the segment); abort otherwise.
                            print 'Should not happen, exit...' 
                            print 'size of uemJob._uemJobCallGraphSegmentList = ' + str(len(uemJob._uemJobCallGraphSegmentList))
                            print 'OnThreadStart log line = ' + jobLogStr
                            sys.exit()
                            # uemJob._uemJobCallGraphSegmentList.append(UemJobCallGraphSegment(jobIdStr, spIdStr, ecomProcessId, jobLogDateTime, jobLogUsLong, jobThreadId))
                                
                        # print 'Added callgraph for job ' + jobIdStr + ', callgraph = ' + str(uemJob._uemJobCallGraphSegmentList)
                    
                # OnThreadExit
                # Invoke EMC_UEM_SnapshotScheduleJobContextDataLeaf::OnThreadExit for job[N-1]
                s = re.search(r'^Invoke (?P<ClassName>.*)::OnThreadExit for job\[(?P<jobIdStr>.*)\]', jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    className = s.group("ClassName")
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                        uemJob._jobOnThreadExitTime.append(jobLogDateTime)
                        uemJob._jobOnThreadExitTimeUs.append(jobLogUsLong)
                        uemJob._jobOnThreadExitThreadId.append(jobThreadId)
                        
                        # For non-scalable jobs OnThreadExit closes the open
                        # execution interval on this thread (scalable jobs are
                        # closed by 'Stop processing job' instead).
                        if uemJob._jobScalableFlag == False: 
                            for jobExecution in uemJob._jobExecutionList: 
                                if jobExecution[0] == jobThreadId and jobExecution[3] == None and jobExecution[4] == None: 
                                    jobExecution[3] = jobLogDateTime
                                    jobExecution[4] = jobLogUsLong
                                    # print 'Job ' + jobIdStr + ' finished execution at ' + str(jobLogUsLong) + ', ' + jobLogStr
                        
                        # job processing call graph exit, key is 'spId_ECOMprocessId'
                        keyStr = spIdStr + '_' + str(ecomProcessId)
                        setFlag = False
                        for uemJobCallGraphSegment in uemJob._uemJobCallGraphSegmentList: 
                            if uemJobCallGraphSegment._spIdStr == spIdStr and uemJobCallGraphSegment._ecomProcessId == ecomProcessId: 
                                uemJobCallGraphSegment._OnThreadExitTime = jobLogDateTime
                                uemJobCallGraphSegment._OnThreadExitTimeUs = jobLogUsLong
                                uemJobCallGraphSegment._OnThreadExitThreadId = jobThreadId
                                setFlag = True
                        if setFlag == False: 
                            print 'Warning: on ' + spIdStr + ', didn\'t find \'OnThreadStart\' method for job ' + jobIdStr + ', we don\'t have a start point of job execution, ignore for now ...'
                    

                # Executing job N-9,idx=1
                # job processing start...
                jobExecutionStartMarker = re.compile(r"^Executing job (?P<jobIdStr>.*),idx=(?P<idx>.*)")
                s = jobExecutionStartMarker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                        # Open an execution interval; slots 3/4 (stop time / us)
                        # stay None until the matching exit/stop line is seen.
                        uemJob._jobExecutionList.append([jobThreadId, jobLogDateTime, jobLogUsLong, None, None])
                        
                        # job processing start, key is 'spId_ECOMprocessId'
                        keyStr = spIdStr + '_' + str(ecomProcessId)
                        setFlag = False
                        for uemJobCallGraphSegment in uemJob._uemJobCallGraphSegmentList: 
                            if uemJobCallGraphSegment._spIdStr == spIdStr and uemJobCallGraphSegment._ecomProcessId == ecomProcessId: 
                                uemJobCallGraphSegment._processingSegList.append(UemJobProcessingSegment(jobLogDateTime, jobLogUsLong, jobThreadId, jobLogStr))
                                setFlag = True
                        if setFlag == False: 
                            # print 'TODO: no start log, cannot got a complete call graph for job ' + jobIdStr + ', key = ' + keyStr + ' from log files, ignore for parsing the job start processing log ...'
                            # print 'keys = ' + str(uemJob._uemJobCallGraphSegmentList)
                            # print 'Could be the 1st time execution...create one... for job ' + jobIdStr
                            jobCallGraphSegment = UemJobCallGraphSegment(jobIdStr, spIdStr, ecomProcessId, None, None, jobThreadId)
                            jobCallGraphSegment._processingSegList.append(UemJobProcessingSegment(jobLogDateTime, jobLogUsLong, jobThreadId, jobLogStr))
                            uemJob._uemJobCallGraphSegmentList.append(jobCallGraphSegment)
                            
                            # print 'size of seg list: ' + str(len(uemJob._uemJobCallGraphSegmentList))
                            # print 'size of seg processing seg list: ' + str(len(uemJob._uemJobCallGraphSegmentList[len(uemJob._uemJobCallGraphSegmentList)-1]._processingSegList))
                
                # Stop processing job, id=N-10 name=job.applicationprovisioningservice.job.CreateVirtualVolume state=Running subState=Solo requestID=
                # job processing end...
                jobExecutionEndMarker = re.compile(r"^Stop processing job, id=(?P<jobIdStr>.*) name=(?P<jobName>.*) state=(?P<jobState>.*) subState=.*")
                s = jobExecutionEndMarker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                        # Scalable jobs close their execution interval here
                        # (non-scalable ones close it at OnThreadExit above).
                        if uemJob._jobScalableFlag == True: 
                            for jobExecution in uemJob._jobExecutionList: 
                                if jobExecution[0] == jobThreadId and jobExecution[3] == None and jobExecution[4] == None: 
                                    jobExecution[3] = jobLogDateTime
                                    jobExecution[4] = jobLogUsLong
                                    # print 'Job ' + jobIdStr + ' finished execution at ' + str(jobLogUsLong) + ', ' + jobLogStr
                                    
                        # job processing stop, key is 'spId_ECOMprocessId'
                        keyStr = spIdStr + '_' + str(ecomProcessId)
                        setFlag = False
                        for uemJobCallGraphSegment in uemJob._uemJobCallGraphSegmentList: 
                            if uemJobCallGraphSegment._spIdStr == spIdStr and uemJobCallGraphSegment._ecomProcessId == ecomProcessId: 
                                # print 'size of seg processing seg list = ' + str(len(uemJobCallGraphSegment._processingSegList))
                                for processingSeg in uemJobCallGraphSegment._processingSegList: 
                                    # if processingSeg._stopTime == None and processingSeg._startTime <= jobLogDateTime: 
                                    if processingSeg._startThreadId == jobThreadId and processingSeg._startTime <= jobLogDateTime and processingSeg._stopTimeUs == None: 
                                        processingSeg._stopTime = jobLogDateTime
                                        processingSeg._stopTimeUs = jobLogUsLong
                                        processingSeg._stopThreadId = jobThreadId
                                        processingSeg._stopLog = jobLogStr
                                        # print 'TEST: ', jobIdStr, ' set stop processing time: ', processingSeg._stopTime, ', starttime = ', processingSeg._startTime
                                        setFlag = True
                        # if setFlag == False: 
                            # print 'Warning: on ' + spIdStr + ', didn\'t find \'Start to run the top level job\' log for ' + jobIdStr + ', we don\'t have a start point of job execution, ignore for now ...'
                                
                                
                
                # Task returned pending id=N-10 name=job.applicationprovisioningservice.job.CreateVirtualVolume state=Running subState=Solo requestID=
                # A 'Task returned pending' line marks the job as scalable
                # (asynchronous, resumed later by the job service).
                s = re.search(r"^Task returned pending id=(?P<jobIdStr>.*) name=.* state=.* subState=.*", jobLogStr)
                if s: 
                    jobIdStr = s.group("jobIdStr")
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                        # print 'Detected Scalable Job, Job ID = ' + jobIdStr
                        uemJob._jobScalableFlag = True
                
                
                                
                
                                
                                
                                
                # Reschedule Job N-24667 from state [Running] to new state [Completed].
                jobFinalStateMarker = re.compile(r'Reschedule Job (?P<jobIdStr>.*) from state \[(?P<oldJobState>\w+)\] to new state \[(?P<newJobState>\w+)\].')
                s = jobFinalStateMarker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group('jobIdStr')
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                        # uemJob._jobFinalState = s.group('newJobState')
                        uemJob._jobStateTransitionList.append([spIdStr, jobLogDateTime, jobLogUsLong, jobIdStr, s.group('newJobState')])
                
                # Encounter error while executing Job[N-17], Task#0[C4CB_UIS_Uem_CommonTaskCreateUserImpl], RecoveryPolicy: Rollback (Continue), Recovery (Rollback): error = 100663399, reason = !
                # Encounter error while executing Job[N-24670], Task#0[CreateAppSnapGroup_root/emc:EMC_UEM_ApplicationLeaf%InstanceID=res_499], RecoveryPolicy: Rollback (Continue), Recovery (Rollback): error = 1903001807, reason = !
                # execution error: 
                jobExecutionErrorMarker = re.compile(r'Encounter error while executing Job\[(?P<jobIdStr>.*)\], Task#(?P<taskIndexStr>\d+)\[(?P<taskNameStr>.*)\], (.*): error = (?P<errorCodeStr>\d+), reason(.*)')
                s = jobExecutionErrorMarker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group('jobIdStr')
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                        executionErrorStr = s.group('errorCodeStr')
                        taskIndexStr = s.group('taskIndexStr')
                        taskNameStr = s.group('taskNameStr')
                        
                        uemJob._executionErrorList.append([jobThreadId, jobLogDateTime, jobLogUsLong, taskIndexStr, taskNameStr, executionErrorStr, jobLogStr])
                    
                # Collect callback list
                # Invoke EMC_UEM_Task_DeleteFlareLULeaf::ExecutePreconditionTest for job[N-30096]
                # NOTE: this generic 'Invoke ... for job[...]' pattern also matches
                # the OnThreadStart/OnThreadExit lines handled above, so those
                # events appear in _callbackList too.
                jobCallbackMaker = re.compile(r'^Invoke (?P<callbackName>.*) for job\[(?P<jobIdStr>.*)\]')
                s = jobCallbackMaker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group('jobIdStr')
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                        callbackName = s.group('callbackName')
                        uemJob._callbackList.append([jobThreadId, jobLogDateTime, jobLogUsLong, callbackName, jobLogStr, ecomProcessId, spIdStr])
                    
                
                    
                    
                
                
                
                
                    
                # IndicationCreation sent for job [N-6],job state=New
                # IndicationCompletion sent for job [N-3],job state=Completed
                # IndicationModification sent for job [N-2],job state=Running,current task index=0, status=Completed
                # IndicationModification sent for job [N-17],job state=Rollback,current task index=0, status=RollingBack
                # IndicationModification sent for job [N-17],job state=Rollback,current task index=0, status=Failed
                
                # IndicationModification sent for job [N-29464],job state=Running,current task index=2,current task status=3
                # Long form first (with task index/status); falls back to the
                # short form (state only) in the else branch below.
                indicationMarker = re.compile(r"^(?P<IndicationTypeStr>.*) sent for job \[(?P<jobIdStr>.*)\],job state=(?P<jobStateStr>.*),current task index=(?P<CurrentTaskIndex>\d+),(.*)status=(?P<CurrentTaskStatusStr>\w+)")
                s = indicationMarker.match(jobLogStr)
                if s: 
                    jobIdStr = s.group('jobIdStr')
                    indicationTypeStr = s.group('IndicationTypeStr')
                    jobStateStr = s.group('jobStateStr')
                    currentTaskIndex = s.group('CurrentTaskIndex')
                    currentTaskStatusStr = s.group('CurrentTaskStatusStr')
                    # append ...
                    if jobIdStr in uemJobDict: 
                        uemJob = uemJobDict[jobIdStr]
                    # else: 
                    #     uemJob = UemJob(jobIdStr)
                    # uemJob._jobStateTransitionList.append([spIdStr, jobLogDateTime, jobLogUsLong, jobThreadId, jobIdStr, indicationTypeStr, jobStateStr, currentTaskIndex, currentTaskStatusStr])
                        uemJob._jobStateTransitionList.append([spIdStr, jobLogDateTime, jobLogUsLong, jobIdStr, jobStateStr])
                else: 
                    indicationMarker = re.compile(r"^(?P<IndicationTypeStr>.*) sent for job \[(?P<jobIdStr>.*)\],job state=(?P<jobStateStr>.*)")
                    s = indicationMarker.match(jobLogStr)
                    if s: 
                        jobIdStr = s.group('jobIdStr')
                        indicationTypeStr = s.group('IndicationTypeStr')
                        jobStateStr = s.group('jobStateStr')
                        # append ...
                        if jobIdStr in uemJobDict: 
                            uemJob = uemJobDict[jobIdStr]
                        # else: 
                        #     uemJob = UemJob(jobIdStr)
                        # uemJob._jobStateTransitionList.append([spIdStr, jobLogDateTime, jobLogUsLong, jobThreadId, jobIdStr, indicationTypeStr, jobStateStr, None, None])
                            uemJob._jobStateTransitionList.append([spIdStr, jobLogDateTime, jobLogUsLong, jobIdStr, jobStateStr])
                    
                
            # else: 
                # print 'not recognized log, keep it for testing... TODO, special handling...'
                # print line
                
            lineCount += 1
        fd.close()
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        # directly put into self._uemJobDict
        # Merge this log's results into the parser-wide dict; a job already
        # seen in the peer SP's log gets its lists combined and re-sorted.
        for jobIdStr in uemJobIdList: 
            uemJob = uemJobDict[jobIdStr]
            
            # sort job state transaction list: 
            uemJob._jobStateTransitionList.sort(uem_job_state_transaction_list_sort)
            
            if jobIdStr in self._uemJobDict: 
                # print 'Already in the job dict, need merge...'
                uemJob_1 = self._uemJobDict[jobIdStr]
                
                # 1. job State Transaction List
                for jobStateTransaction in uemJob._jobStateTransitionList: 
                    uemJob_1._jobStateTransitionList.append(jobStateTransaction)
                uemJob_1._jobStateTransitionList.sort(uem_job_state_transaction_list_sort)
                # 2. final job state
                if len(uemJob_1._jobStateTransitionList) > 0: 
                    uemJob_1._jobFinalState = uemJob_1._jobStateTransitionList[len(uemJob_1._jobStateTransitionList)-1][4]
                # 3. scalable job flag 
                if uemJob_1._jobScalableFlag != uemJob._jobScalableFlag and uemJob._jobScalableFlag == True: 
                    uemJob_1._jobScalableFlag = True
                # 4. running with ECOM restart flag:
                if uemJob_1._runningWithECOMRestarts != uemJob._runningWithECOMRestarts and uemJob._runningWithECOMRestarts == True: 
                    uemJob_1._runningWithECOMRestarts = True
                # 5. task status list
                for jobTaskStatusTransaction in uemJob._jobTaskStatusTransitionList: 
                    uemJob_1._jobTaskStatusTransitionList.append(jobTaskStatusTransaction)
                uemJob_1._jobTaskStatusTransitionList.sort(uemjob_task_status_transaction_list_sort)
                # 6. Execution error list
                for executionError in uemJob._executionErrorList: 
                    uemJob_1._executionErrorList.append(executionError)
                uemJob_1._executionErrorList.sort(uemjob_execution_error_list_sort)
                # 7. callback list: 
                for callback in uemJob._callbackList: 
                    uemJob_1._callbackList.append(callback)
                uemJob_1._callbackList.sort(uemjob_callback_list_sort)
                # 8. callgraph segment list
                for callGraphSegment in uemJob._uemJobCallGraphSegmentList: 
                    uemJob_1._uemJobCallGraphSegmentList.append(callGraphSegment)
                uemJob_1._uemJobCallGraphSegmentList.sort(uemjob_callGraphSegment_list_sort)
                # 9. submit time; keep the EARLIER submit record of the two SPs
                if uemJob_1._jobSubmitTime > uemJob._jobSubmitTime: 
                    uemJob_1._jobSubmitTime = uemJob._jobSubmitTime
                    uemJob_1._jobSubmitTimeUs = uemJob._jobSubmitTimeUs
                    uemJob_1._jobSubmitThreadId = uemJob._jobSubmitThreadId
                    uemJob_1._jobSubmitSpIdStr = uemJob._jobSubmitSpIdStr
            else: 
                self._uemJobDict[jobIdStr] = uemJob
                self._uemJobIdList.append(jobIdStr)
                
        jobIdStrList = self._uemJobDict.keys()
        # print 'After tirage on ' + spIdStr + ', there are ' + str(len(jobIdStrList)) + ' jobs.'
        
    def get_scalable_uem_job_list(self): 
        """Collect every job flagged scalable into self._uemJobScalableList."""
        scalable_jobs = [self._uemJobDict[job_id]
                         for job_id in self._uemJobIdList
                         if self._uemJobDict[job_id]._jobScalableFlag == True]
        self._uemJobScalableList.extend(scalable_jobs)
                
    def get_abnormal_uem_job_list(self): 
        for jobIdStr in self._uemJobIdList: 
            uemJob = self._uemJobDict[jobIdStr]
            
            # get abnormal uem job list
            if (len(uemJob._jobStateTransitionList) > 0): 
                uemJob._jobFinalState = uemJob._jobStateTransitionList[len(uemJob._jobStateTransitionList)-1][4]
                if (uemJob._jobFinalState != 'Completed'): 
                    self._uemJobAbnormalList.append(uemJob)
            else: 
                print 'Error: job ' + uemJob._jobIdStr + ' has no state'
            
    def build_relationship_with_batch_job(self): 
        """
        Group jobs by their REST batch-job id: fill self._batchJobDict
        (batch id -> list of member job ids) and self._batchJobList
        (batch ids in first-seen order). Jobs without a batch id are skipped.
        """
        for job_id in self._uemJobIdList: 
            job = self._uemJobDict[job_id]
            batch_id = job._jobBatchJobIdStr
            if len(batch_id) == 0: 
                continue
            # first member of a batch registers the batch id
            if batch_id not in self._batchJobDict: 
                self._batchJobList.append(batch_id)
                self._batchJobDict[batch_id] = []
            self._batchJobDict[batch_id].append(job._jobIdStr)
    
    def run(self):
        """Drive the uem-job analysis.

        Parses the per-SP uem job service logs (when the SP output folder
        and log file exist), analyzes the job/task DB tables, then derives
        the abnormal-job, scalable-job and batch-job views.
        """
        uemJobLogFileBaseName = 'cemtracer_uemjobsvc.log'
        uemJobLogSpa = None
        uemJobLogSpb = None

        if self._jobOutSpa is not None:
            uemJobLogSpa = os.path.join(self._jobOutSpa, uemJobLogFileBaseName)
        if self._jobOutSpb is not None:
            uemJobLogSpb = os.path.join(self._jobOutSpb, uemJobLogFileBaseName)

        # start to analyze uem job log file on spa
        if uemJobLogSpa is not None and os.path.isfile(uemJobLogSpa):
            self.start_analyze(uemJobLogSpa, 'spa')

        # start to analyze uem job log file on spb
        # (the path was redundantly re-joined here before; it is already
        # computed above, matching the spa branch)
        if uemJobLogSpb is not None and os.path.isfile(uemJobLogSpb):
            self.start_analyze(uemJobLogSpb, 'spb')

        # start to analyze uem job/task db tables
        self.start_analyze_uem_jobtask_db_tables()
        # self.display_jobtask_db_summary()

        self.get_abnormal_uem_job_list()
        self.get_scalable_uem_job_list()
        self.build_relationship_with_batch_job()
        
        
################################
# end - class UemJobParser
################################

if __name__ == '__main__':
    if (_validateInputParameters() == False): 
        print parser.print_help()
        sys.exit()
    
    startTime = datetime.datetime.now()
    
    # start extract dc file.
    dcExtractor = DcExtractor(args.dc)
    if (dcExtractor.run() == False): 
        sys.exit()
    
    # uem job parser
    uemJobParser = UemJobParser(dcExtractor._jobFolder, dcExtractor.jobOutFolder, dcExtractor.jobOutSpa, dcExtractor.jobOutSpb, dcExtractor._db_reports_folder)
    uemJobParser.run()
    
    
    # Display the folders: original log folders, tmp folders, out folders: 
    # print 'SPA log file root folder = ', dcExtractor.spaLogFileRoot
    # print 'SPB log file root folder = ', dcExtractor.spbLogFileRoot
    # print 'SPA tmp folder           = ', dcExtractor.jobTmpSpa
    # print 'SPB tmp folder           = ', dcExtractor.jobTmpSpb
    # print 'job folder               = ', dcExtractor._jobFolder
    # print 'out folder               = ', dcExtractor.jobOutFolder
    # print 'SPA out folder           = ', dcExtractor.jobOutSpa
    # print 'SPB out folder           = ', dcExtractor.jobOutSpb

    print '==================================== Job Triage Information ===================================='
    print 'Job Triage DC File                   = ', args.dc
    print 'Aggregated CEM Log Folder            = ', uemJobParser._jobOutFolder
    print 'Job Triage Result Folder             = ', uemJobParser._resultFolder
    print 'Uem JobTask Related DB Table Count   = ', uemJobParser._numOfJobTaskRelatedTables
    for f in uemJobParser._jobRelatedTableFiles             : print '    ' + f
    for f in uemJobParser._jobTaskRelatedTableFiles         : print '    ' + f
    for f in uemJobParser._jobRelatedArrayTableFiles        : print '    ' + f
    for f in uemJobParser._jobTaskRelatedArrayTableFiles    : print '    ' + f
    for f in uemJobParser._jobAssocTableFiles               : print '    ' + f
    for f in uemJobParser._jobTaskAssocTableFiles           : print '    ' + f 
    print 
    
    print '====================================== Job Triage Result ======================================='
    uemJobParser.generate_job_analysis_summary()
    uemJobParser.generate_analysis_result()
    
    print 
    print 'Generated job analysis summary File: \n    ' + uemJobParser._summaryFile
    print 'Generated job analysis result files for ' + str(len(uemJobParser._callGraphFileList)) + ' jobs.'
    for callGraphFile in uemJobParser._callGraphFileList: 
        print '    ' + callGraphFile
    
    endTime = datetime.datetime.now()
    print 'Job Triage Finished in ' + str((endTime-startTime).seconds) + ' seconds!'
    
    del dcExtractor

