from collections import defaultdict, namedtuple
import re, sys

# Transaction read/write modes as they appear in journal address records.
RWMODES = ["RD", "WR"]


def id_socket_lines(filename) :
    """Yield ``[id, socket_name]`` field pairs from symbol lines of a journal.

    Symbol lines start with ``$`` and hold exactly two whitespace-separated
    fields: a transaction-source id and the socket name it maps to.  All
    other lines are skipped.
    """
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(filename) as fp :
        for line in fp :
            if not line.startswith("$") :
                continue
            flds = line.split()
            if len(flds) == 2 :
                yield flds


def address_lines(filename, id_to_socket) :
    """Yield parsed address-record fields from a journal file.

    Address records start with ``$`` and carry at least seven fields:
    [0] source id, [1] '@start', [2] '@stop', [3] '@address',
    [4] 'RD'/'WR' mode, [5] byte count, [6] 'OK' status, plus trailing data.

    Records with fewer than seven fields are skipped -- the original guard
    only required four fields and then crashed indexing flds[4]/flds[6].
    Raises AssertionError on records with an unknown mode, a non-OK status,
    or an id missing from id_to_socket.
    """
    with open(filename) as fp :
        for line in fp :
            if not line.startswith("$") :
                continue
            flds = line.split()
            if len(flds) < 7 :      # was < 4: fields 4 and 6 are read below
                continue
            assert flds[4] in RWMODES
            assert flds[6] == "OK"
            # dict.has_key() no longer exists in Python 3; 'in' works in both.
            assert flds[0] in id_to_socket
            yield flds

def bandwidth_calculation(filename, id_to_socket, outdir) :
    """Compute per-initiator RD/WR/any bandwidth and write a summary report.

    Every address record contributes a (start, stop, num_bytes) window to
    its initiator and to an aggregate bucket: "total_ddr" for target
    sockets, "total" for everything else.  Monitor sockets ("*_moda")
    are ignored.  Bandwidth for a bucket is
        total bytes / (latest stop - earliest start) * 1e3   # Gbps
    and is reported as None when the bucket is empty or the window has
    zero width (the original raised ZeroDivisionError in that case).

    Output goes to <outdir>/bandwidth_results.txt with the aggregate
    ("total*") lines stably sorted to the top.
    """
    requests = defaultdict( lambda: {"RD":[],"WR":[],"any":[]} )
    for flds in address_lines(filename, id_to_socket) :
        initiator = id_to_socket[ flds[0] ]
        if initiator.endswith("_moda") :    # monitor socket: skip
            continue
        rwmode = flds[4]
        num_bytes = int(flds[5])
        start, stop = int(flds[1][1:]), int(flds[2][1:])
        # Targets aggregate into "total_ddr", everything else into "total".
        if "_targ_" in initiator :
            which_inits = [initiator, "total_ddr"]
        else :
            which_inits = [initiator, "total"]
        for init in which_inits :
            requests[init][rwmode].append( (start,stop,num_bytes) )
            requests[init]["any" ].append( (start,stop,num_bytes) )
    bwfile = outdir+"/bandwidth_results.txt"
    with open(bwfile, "w") as ofp :
        for initiator in requests :
            bwstats = {}
            for rwmode in requests[initiator] :
                windows = requests[initiator][rwmode]
                if not windows :
                    bandwidth = None
                else :
                    # One pass over the windows instead of the original's
                    # two full in-place sorts.
                    start = min(w[0] for w in windows)
                    stop = max(w[1] for w in windows)
                    num_bytes = sum(w[2] for w in windows)
                    if stop == start :  # zero-width window: rate undefined
                        bandwidth = None
                    else :
                        bandwidth = (1.*num_bytes) / (stop-start) * pow(10,3) # Gbps
                bwstats[rwmode] = bandwidth
            # file.write() replaces the Python-2-only "print >>" statements;
            # the explicit single spaces reproduce print's separator bytes.
            ofp.write("%25s    " % initiator)
            for rwmode in ["RD", "WR", "any"] :
                if bwstats[rwmode] is None :
                    ofp.write(" %3s = %6s    " % (rwmode, "None"))
                else :
                    ofp.write(" %3s = %6.3f    " % (rwmode, bwstats[rwmode]))
            ofp.write(" \n")
    # Rewrite the file with the "total*" summary lines first (stable sort).
    with open(bwfile) as ifp :
        lines = ifp.readlines()
    lines.sort( key = lambda x : x.split()[0][0:5] != "total" )
    with open(bwfile, 'w') as ofp :
        ofp.writelines(lines)

def process_journal_file(filename, outdir) :
    """Analyse one journal file and write latency/bandwidth reports to outdir.

    Produces:
      max_latency.txt        per-socket RD/WR latency statistics
      bandwidth_results.txt  per-initiator bandwidth (bandwidth_calculation)
      <socket>.sorted        matched ddr/non-ddr request timing breakdowns
      long_requests.txt      counts of requests  with req  time > 150 units
      long_response.txt      counts of requests  with resp time > 150 units

    NOTE(review): output paths assume outdir already exists -- confirm.
    """
    with open(outdir+"/max_latency.txt", 'w') as latency_ofp :
        ## parse all symbols associated with initiators, targets, ddrs, etc
        id_to_socket = {}
        for flds in id_socket_lines(filename) :
            id_to_socket[ flds[0] ] = flds[1]
        # file.write() replaces the Python-2-only "print >>" statements
        # throughout; string contents reproduce print's separator bytes.
        latency_ofp.write("=" * 10 + " There are %d symbols.\n" % len(id_to_socket))
        ## accumulate per-socket, per-mode latencies
        latencies = defaultdict(lambda:{"RD":[],"WR":[]})
        for flds in address_lines(filename, id_to_socket) :
            # (stop - start) timestamps scaled down to coarser units
            latency = int((int(flds[2][1:]) - int(flds[1][1:]))/1000.)
            latencies [id_to_socket[flds[0]]] [flds[4]] .append(latency)
        ## summarize, then report sockets sorted on max latency per mode
        for socket in latencies :
            for mode in latencies[socket] :
                latencies[socket][mode] = descriptive_stats(latencies[socket][mode])
        for mode in RWMODES :
            latency_ofp.write("=" * 10 + " Sockets sorted on max %s latency\n" % mode)
            sockets_with_latency = [sock for sock in latencies
                        if latencies[sock][mode] is not None
                        and not sock.endswith("moda")]
            for sock in sorted(sockets_with_latency,
                            key=lambda x : -latencies[x][mode].max) :
                latency_ofp.write("%20s     %s\n" % (sock, latencies[sock][mode]))
    # count windows of requests for each initiator
    bandwidth_calculation(filename, id_to_socket, outdir)
    ## check requests to same address from ddr and non-ddr sockets
    sources = {}
    ddr_re = re.compile("^ddr.*_data")
    ddr1_inv_mask = int("000000000100", 16)
    lines_to_print = []
    for flds in address_lines(filename, id_to_socket) :
        ak = tuple(flds[3:5])           # (address, RD/WR) matching key
        masklen = len(flds[3])-1
        # same key with the ddr1 mask bit forced on, zero-padded to width
        masked = hex(ddr1_inv_mask|int(ak[0][1:],16))[2:].upper()
        masked = "@" + ("0"*(masklen-len(masked))) + masked
        ak1 = (masked, ak[1])
        if ddr_re.search(id_to_socket[flds[0]]) :
            oldflds = matched_key = None
            if ak in sources :          # has_key() is Python-2-only
                oldflds, matched_key = sources[ak], ak
            elif ak1 in sources :
                oldflds, matched_key = sources[ak1], ak1
            if oldflds is not None :
                # third value of the first "{...}" group of the trailing
                # field -- presumably the NoC entry delay; verify format.
                noc_entry_delay = int(int(oldflds[-1].split("}{")[0][1:-1].split(",")[2])/1000.)
                start_time  = int(oldflds[1][1:])
                request_time  = time_duration(flds[1], oldflds[1])
                noc_dp = int( 100.*noc_entry_delay/(0.+request_time) )
                noc_req_delay = request_time - noc_entry_delay
                response_time = time_duration(oldflds[2], flds[2])
                total_time = time_duration(oldflds[2], oldflds[1])
                ddr_ctime = time_duration(flds[2], flds[1])
                pstr =  "%20s %15s %2s %10d %19s  total= %4d nocEntryDelay= %3d (%2d%%) nocReqDelay= %3d (%3d%%) req= %3d resp= %3d ddr= %3d (%2d%%)" % \
                    (id_to_socket[oldflds[0]], oldflds[3], oldflds[4], \
                        start_time, id_to_socket[flds[0]], \
                        total_time, noc_entry_delay, noc_dp, \
                        noc_req_delay, 100-noc_dp, \
                        request_time, response_time, ddr_ctime, \
                        (100.*ddr_ctime)/total_time)
                lines_to_print.append(pstr)
                # delete whichever key actually matched (replaces the
                # original try/del/except KeyError dance)
                del sources[matched_key]
        elif not id_to_socket[flds[0]].endswith("moda") :
            # a non-ddr socket must not have two outstanding requests on
            # the same (address, mode) key
            if ak in sources :
                print(sources[ak])
                print(flds)
                assert False
            sources[ak] = flds
    ## group matched lines by originating socket; write per-socket reports
    outlines = defaultdict(list)
    for pline in lines_to_print :
        outlines[pline.split()[0]].append(pline)
    long_requests = defaultdict(lambda:{"WR":{"long":0,"total":0}, "RD":{"long":0,"total":0}, "any":{"long":0,"total":0}})
    long_response = defaultdict(lambda:{"WR":{"long":0,"total":0}, "RD":{"long":0,"total":0}, "any":{"long":0,"total":0}})
    long_counts_flds = [ ("req=", long_requests, "long_requests.txt"), ("resp=", long_response, "long_response.txt") ]
    for fn in outlines :
        for l in outlines[fn] :
            for substr, request_counts, outfile in long_counts_flds :
                flds = l.split()
                # BUG FIX: the original hard-coded flds.index("req=") here,
                # so long_response.txt also counted *request* times; each
                # table must look up its own marker (substr).
                the_time = int(flds[ flds.index(substr)+1 ])
                for rw in [flds[2],"any"] :
                    request_counts [flds[0]] [rw] ["total"] += 1
                    request_counts ["all"]   [rw] ["total"] += 1
                    if the_time > 150 :
                        request_counts [flds[0]] [rw] ["long"] += 1
                        request_counts ["all"]   [rw] ["long"] += 1
        with open(outdir+"/"+fn+".sorted", 'w') as ofp :
            # sort on field 6 = the "total=" time, descending
            for l in sorted(outlines[fn], key=lambda x:-int(x.split()[6])) :
                ofp.write(l + "\n")
    # 'outfile' replaces the original loop variable 'filename', which
    # silently clobbered the function parameter of the same name
    for substr, request_counts, outfile in long_counts_flds :
        with open(outdir+"/" + outfile, 'w') as ofp :
            for ak in request_counts :
                ofp.write("%20s     " % ak)
                for ctype in ["RD","WR","any"] :
                    ofp.write("          " + ctype + "=")
                    total = request_counts[ak][ctype]["total"]
                    # renamed from 'long', which shadowed a builtin
                    long_ct = request_counts[ak][ctype]["long"]
                    if total > 0 :
                        perc = int(100*long_ct/(0.+total))
                        ofp.write(" %4d/%5d(%3d%%)" % (long_ct, total, perc))
                    else :
                        ofp.write(" %16s" % "N/A")
                ofp.write(" \n")

def time_duration(t1, t2) :
    """Return (t1 - t2) scaled down by 1000 and truncated toward zero.

    Both arguments are journal timestamps of the form '@12345'; the
    leading marker character is stripped before conversion.
    """
    later, earlier = int(t1[1:]), int(t2[1:])
    return int((later - earlier) / 1000.)

# NOTE: the original file first bound DescriptiveStats to a 4-field
# namedtuple and immediately shadowed it with the class below; the dead
# definition has been removed.
class DescriptiveStats(
    namedtuple("DescriptiveStats", ["min", "max", "mean",
                "counts", "frac_ge_mean", "frac_near_max"])
) :
    """Summary statistics for a list of latency samples.

    min/max/mean/counts are integers; frac_ge_mean and frac_near_max are
    pre-formatted "N(P%)" strings (built by descriptive_stats).
    """
    def __str__(self) :
        # Single space before frac_near_max: the original's backslash
        # continuation *inside* the string literal leaked 8 spaces of
        # source indentation into the report output.
        return ("(min=%7d   max=%7d   mean=%7d   counts=%5d"
                "  frac_ge_mean=%s frac_near_max=%s)") % \
            (self.min, self.max, self.mean, self.counts,
             self.frac_ge_mean, self.frac_near_max)


def descriptive_stats(iterable) :
    """Summarise a sized collection of integer samples.

    Returns None for an empty collection, otherwise a DescriptiveStats:
      min / max / mean : extremes and the floor-average
      counts           : number of samples
      frac_ge_mean     : "N(P%)" of samples >= mean
      frac_near_max    : "N(P%)" of samples within 200 units of the max
    """
    if not iterable :
        return None
    n = len(iterable)
    amin, amax = min(iterable), max(iterable)
    # '//' keeps the integral (floor) mean that Python 2's int '/' gave,
    # so behaviour is identical under Python 3 as well.
    amean = sum(iterable) // n
    ge_mean = len([v for v in iterable if v >= amean])
    frac_ge_mean = "%4d(%2d%%)" % (ge_mean, (100.*ge_mean)/n)
    near_max = len([v for v in iterable if v + 200 >= amax])
    frac_near_max = "%4d(%2d%%)" % (near_max, (100.*near_max)/n)
    return DescriptiveStats(amin, amax, amean, n, frac_ge_mean, frac_near_max)

if __name__ == "__main__" :
    # sys.stdout.write emits the same bytes as the original Python-2-only
    # "print" statement, but parses under both Python 2 and 3.
    filename = sys.argv[1]
    sys.stdout.write("Analyzing  %s\n" % filename)
    process_journal_file(filename, "output")
