import os.path
import re
import time

from Ganga.Utility.Shell import Shell
from Ganga.Utility.GridShell import getShell
from Ganga.Core.GangaThread.MTRunner import MTRunner, Data, Algorithm

def get_job_roundtime(job):
    """Return the job roundtrip time in seconds: final timestamp minus
    submission timestamp.

    NOTE(review): timedelta.seconds ignores whole days, so a job spanning
    more than 24h is under-reported -- confirm whether total duration
    (total_seconds) was intended.
    """
    try:
        return (job.time.final() - job.time.submitted()).seconds
    except Exception:
        # Log the job id and submission time for diagnosis, then re-raise
        # with the original traceback (bare raise; 'raise e' would lose it).
        # The previous code printed submitted() twice by mistake.
        print('job %s: submitted=%s' % (job.id, job.time.submitted()))
        raise

def grid_efficiency(job):
    """Return the grid efficiency of a job: application runtime as a
    percentage of the total roundtrip time.

    Returns the -9999 sentinel (consistent with get_job_overhead) when the
    application runtime is unavailable or the roundtrip time is zero --
    previously a sub-second roundtrip raised ZeroDivisionError and a -9999
    runtime sentinel produced a meaningless negative percentage.
    """
    runtime   = get_app_runtime(job)
    roundtime = get_job_roundtime(job)

    if runtime < 0 or roundtime <= 0:
        return -9999

    return runtime * 100.0 / roundtime

def get_job_overhead(job):
    """Return the grid overhead in seconds: roundtrip time minus application
    runtime, or -9999 when the application runtime is unavailable."""
    runtime   = get_app_runtime(job)
    roundtime = get_job_roundtime(job)

    if runtime < 0:
        return -9999

    return roundtime - runtime

def get_job_events(job):
    """Return the number of processed events reported by AMAEventCounter in
    the job's stdout log: 0 when no counter line is found, -9999 when the
    job has not completed."""
    if job.status != 'completed':
        return -9999

    evt_pattern = re.compile('AMAEventCounter::finalize.*\s([0-9]+)\s.*events in total')

    # Panda keeps a plain-text athena_stdout.txt; other backends gzip stdout.
    if job.backend.__dict__['_impl']._name == 'Panda':
        grep_cmd = 'grep "AMAEventCounter::finalize"'
        logpath  = job.outputdir+'/athena_stdout.txt'
    else:
        grep_cmd = 'zgrep "AMAEventCounter::finalize"'
        logpath  = job.outputdir+'/stdout.gz'

    nevt = 0
    shell = Shell()
    if logpath:
        rc, out, m = shell.cmd1('%s %s' % (grep_cmd, logpath))

        if rc != 0:
            print('cannot get event number for job %s' % job.fqid)
        else:
            # the last matching line wins, mirroring the counter's final report
            for line in out.split('\n'):
                match = evt_pattern.match(line.strip())
                if match:
                    nevt = int(match.group(1))

    return nevt

def get_app_runtime(job):
    """Return the application runtime in seconds.

    Returns -9999 when the job has not completed.  For Panda jobs the value
    comes from the pilot timing report; for other backends it is derived from
    the 'Load application executable' / 'GZipping stdout and stderr'
    timestamps found in __jobscript__.log (0 when both cannot be found).
    """
    if job.status != 'completed':
        return -9999

    if job.backend.__dict__['_impl']._name == 'Panda':
        # pilotTiming is a '|'-separated record; index 2 holds the payload runtime
        return int(job.backend.jobSpec['pilotTiming'].split('|')[2])

    logpath = job.outputdir+'/__jobscript__.log'

    re_begin = re.compile('^(.*)\s+(\[Info\])\s+(Load application executable).*$')
    re_end   = re.compile('^(.*)\s+(\[Info\])\s+(GZipping stdout and stderr).*$')

    begin_str = ''
    end_str   = ''

    begin_sec = 0
    end_sec   = 0

    if os.path.exists(logpath):
        logfile = open(logpath)
        for line in logfile.readlines():
            stripped = line.strip()
            m = re_begin.match(stripped)
            if m:
                # keep the latest 'begin' stamp seen before the 'end' stamp
                begin_str = m.group(1)
                continue
            m = re_end.match(stripped)
            if m:
                end_str = m.group(1)
                break
        logfile.close()

    if begin_str and end_str:
        fmt = '%a %b %d %H:%M:%S %Y'
        begin_sec = time.mktime(time.strptime(begin_str.strip(), fmt))
        end_sec   = time.mktime(time.strptime(end_str.strip(), fmt))

    return end_sec - begin_sec

def retrieve_panda_log(jlist):
    """
    Download the Panda job log files into the job's outputdir using MTRunner.

    Returns the list of jobs for which athena_stdout.txt was successfully
    extracted into the job's outputdir.

    NOTE(review): 'config' used below is not imported in this module;
    presumably it is the Ganga GPI configuration object available at
    runtime -- confirm.
    """

    ## setting up the gridShell environment for executing dq2-get command
    gs = getShell(middleware='GLITE')

    # point the DQ2 client at the configured catalogue servers
    gs.env['DQ2_URL_SERVER']=config.DQ2.DQ2_URL_SERVER
    gs.env['DQ2_URL_SERVER_SSL']=config.DQ2.DQ2_URL_SERVER_SSL
    gs.env['DQ2_LOCAL_ID']=''

    import GangaAtlas.PACKAGE
    try:
        # locate the DQ2Clients python modules shipped with GangaAtlas
        pythonpath=GangaAtlas.PACKAGE.setup.getPackagePath2('DQ2Clients','PYTHONPATH',force=False)
    except:
        pythonpath = ''

    gs.env['PYTHONPATH'] = gs.env['PYTHONPATH']+':'+pythonpath

    ## exclude the Ganga-owned external package for LFC python binding
    pythonpaths = []
    for path in gs.env['PYTHONPATH'].split(':'):
        if not re.match('.*\/external\/lfc\/.*', path):
            pythonpaths.append(path)
        # NOTE(review): this assignment sits inside the loop; the final value
        # is the same as assigning once after the loop (the iterated list was
        # snapshot by split() above), but the indentation looks unintended.
        gs.env['PYTHONPATH'] = ':'.join(pythonpaths)

    class PandaLogDownloadAlgorithm(Algorithm):
        """
        Class for downloading Panda log files from a DQ2 dataset
        """

        def process(self, job):
            """
            downloads log file of the given job item

            Returns True when athena_stdout.txt ends up in job.outputdir.
            """

            ick = False
            localdir = job.outputdir
            ds       = job.outputdata.datasetname
            # pick the log tarball out of the output file list
            # (entries are comma-separated records; index 1 is the filename)
            filename = ''
            for f in map(lambda x:x.split(',')[1], job.outputdata.output):
                if f.find('log.tgz') > 0:
                    filename = f
                    break

            if not filename:
                return ick 

            # skip the download when the log was already extracted earlier
            if os.path.exists(os.path.join(localdir,'athena_stdout.txt')):
                ick = True
                return ick 

            tar_fpath = os.path.join(localdir, filename)
            cmd = 'dq2-get --client-id=ganga -L ROAMING -a -d -D -H %s -f %s %s' % (localdir,filename,ds)
            rc, out, m = gs.cmd1(cmd,allowed_exit=[0,255])
            if rc == 0:
                # find the athena_stdout.txt entry inside the tarball
                cmd = 'tar tzf %s | grep "athena_stdout.txt"' % tar_fpath
                rc, out, m = gs.cmd1(cmd,allowed_exit=[0,255])
                if rc == 0:
                    athena_log = out.strip()
                    # extract just that entry and flatten it into localdir
                    cmd = 'cd %s; tar xzf %s %s; mv %s %s' % (localdir, tar_fpath, athena_log,
                                                             os.path.join(localdir, athena_log),
                                                             os.path.join(localdir, 'athena_stdout.txt'))
                    rc, out, m = gs.cmd1(cmd,allowed_exit=[0,255])
                    if rc == 0:
                        ick = True

            ## remove the downloaded tarball regardless of extraction outcome
            try:
                os.unlink(tar_fpath)
            except:
                pass

            return ick 
   
    ## start the MTRunner to download log tarball and extract the athena_stdout.txt file
    runner = MTRunner(name='panda_log_downloader',
                      data=Data(collection=jlist),
                      algorithm=PandaLogDownloadAlgorithm(),
                      numThread=10,
                      keepAlive=False)
    runner.start()
    runner.join()

    ## return the list of jobs contains the successfully downloaded athena_stdout.txt 
    return runner.getDoneList()

def get_job_timestamp(job, state):
    """Return the epoch seconds of the given job state transition
    (e.g. 'submitted', 'running', 'final'), or 0 when the job never
    recorded that state.

    Any unexpected error is printed with context and re-raised.
    """
    tobj = job.time.timestamps

    ts = 0

    try:
        # timestamps hold datetime objects; convert the UTC time tuple to epoch
        ts = int(time.mktime(tobj[state].utctimetuple()))
    except KeyError:
        # state never reached: keep the 0 sentinel
        pass
    except Exception:
        # print context and re-raise with the original traceback preserved
        # (bare raise; the previous 'raise e' dropped the traceback)
        print('%s %s' % (job.id, tobj))
        raise

    return ts

def make_root_ttree(fpath, jlist):
    """Create a ROOT TTree summarising the given Ganga jobs and write it to
    the ROOT file at fpath.

    Panda job logs are retrieved first (get_job_events needs them locally).
    One tree entry is filled per subjob, or per master job when it has no
    subjobs.  The previously duplicated fill logic for the two cases is now
    shared in the _fill_entry closure.
    """

    ## separating Panda jobs from the given jlist (as the log file needs to be retrieved first)
    panda_jlist = [j for j in jlist
                   if j.backend.__dict__['_impl']._name == 'Panda']

    ret_jlist = retrieve_panda_log(panda_jlist)

    if len(ret_jlist) < len(panda_jlist):
        print('Panda log for some jobs not properly retrieved: %d/%d' % (len(ret_jlist), len(panda_jlist)))

    ## creating TTree
    from ROOT import TFile, TTree

    from array import array
    from ROOT import gROOT, AddressOf

    # fixed-size C struct used as the buffer behind the string branches
    gROOT.ProcessLine(
    "struct JobInfoStruct {\
       Char_t  fqid[16];\
       Char_t  bkend[8];\
       Char_t  bkid[128];\
       Char_t  status[16];\
       Char_t  cloud[8];\
       Char_t  ce[64];\
    };" );

    from ROOT import JobInfoStruct

    myjobinfo = JobInfoStruct()

    # integer branch buffers (epoch seconds)
    job_tsubmit = array('i',[0])
    job_tstart  = array('i',[0])
    job_tstop   = array('i',[0])

    # float/int branch buffers, -9999 = "not available"
    job_etime = array('f',[-9999.])
    app_etime = array('f',[-9999.])
    no_events = array('i',[-9999])

    f = TFile(fpath,'RECREATE')

    t = TTree('Tree_JobInfo','Ganga Job Information Tree')

    t.Branch('jobId'        , AddressOf(myjobinfo,'fqid')   , 'fqid/C')
    t.Branch('backend'      , AddressOf(myjobinfo,'bkend')  , 'bkend/C')
    t.Branch('backendJobId' , AddressOf(myjobinfo,'bkid')   , 'bkid/C')
    t.Branch('status'       , AddressOf(myjobinfo,'status') , 'status/C')
    t.Branch('cloud'        , AddressOf(myjobinfo,'cloud')  , 'cloud/C')
    t.Branch('ce'           , AddressOf(myjobinfo,'ce')     , 'ce/C')

    t.Branch('tSubmit', job_tsubmit, 'job_tsubmit/I')
    t.Branch('tStart' , job_tstart , 'job_tstart/I')
    t.Branch('tStop'  , job_tstop  , 'job_tstop/I')

    t.Branch('jobEtime', job_etime, 'job_etime/F')
    t.Branch('appEtime', app_etime, 'app_etime/F')
    t.Branch('noEvents', no_events, 'no_events/I')

    def _fill_entry(j):
        """Fill the branch buffers from one (sub)job and commit a tree entry."""
        # fixed-size Char_t buffers need explicit NUL termination
        myjobinfo.fqid   = "%s\0" % j.fqid
        myjobinfo.bkend  = "%s\0" % j.backend.__dict__['_impl']._name
        myjobinfo.bkid   = "%s\0" % str(j.backend.id)
        myjobinfo.status = "%s\0" % j.status
        myjobinfo.cloud  = "%s\0" % j.backend.requirements.cloud
        myjobinfo.ce     = "%s\0" % j.backend.actualCE

        # sentinel defaults for jobs that never ran or never finished
        job_tsubmit[0] = 0
        job_tstart[0]  = 0
        job_tstop[0]   = 0

        job_etime[0] = -9999.
        app_etime[0] = -9999.
        no_events[0] = -9999

        if j.status not in ['new','submitting']:
            job_tsubmit[0] = get_job_timestamp(j,'submitted')

            if 'running' in j.time.timestamps.keys():
                job_tstart[0]  = get_job_timestamp(j,'running')
                if j.status in ['killed']:
                    job_tstop[0]   = get_job_timestamp(j,'killed')
                elif j.status in ['completed','failed']:
                    job_tstop[0]   = get_job_timestamp(j,'final')
                    job_etime[0] = get_job_roundtime(j)
                    app_etime[0] = get_app_runtime(j)
                    no_events[0] = get_job_events(j)

        t.Fill()

    for j in jlist:
        if len(j.subjobs) > 0:
            for sj in j.subjobs:
                _fill_entry(sj)
        else:
            _fill_entry(j)

    ## printing the summary table 
    t.Print()

    ## showing the first event 
    t.Show(0)

    f.Write()
    f.Close()