'''
Created on May 9, 2010

@author: roni
'''

import re
import logging
from flowDetails import FlowDetails 
import os


class TraceParser(object):
    '''
    Parses per-flow packet delays out of an NS2 trace file.

    A trace line looks like:
        r 0.123 0 2 tcp 40 ------- 1 0.0 3.0 0 7
    i.e. event type, time, the two link endpoints, protocol, size, flags,
    flow id, source "node.port", target "node.port", sequence, packet id.
    '''
    # Raw strings so \d, \w, \. reach the regex engine verbatim (non-raw
    # escape sequences are deprecated); the dot between a node address and
    # its port is escaped so it only matches a literal '.'.
    REGEX_TRACE_PATTERN = r"(?P<event_type>[-+rd]) (?P<sample_time>\d*\.?\d*) " \
                          r"(?P<from_node>\d*) (?P<to_node>\d*) \w* \d* -* " \
                          r"(?P<flow_id>\d*) (?P<source_addr>\d*)\.\d* " \
                          r"(?P<target_addr>\d*)\.\d* \d* (?P<packet_id>\d*)"

    def parse(self, trace_file_name):
        '''
        Parse an NS2 trace file and compute the average delay of every flow.

        trace_file_name -- path to the NS2 trace (.tr) file.
        Returns a dict mapping flow_id -> FlowDetails, with the flow's path,
        average delay and delivered-packet count filled in.
        Raises ValueError if packets of one flow are observed on more than
        a single path.
        '''
        logging.info("Parsing trace file %s", trace_file_name)
        self.trace_pattern = re.compile(TraceParser.REGEX_TRACE_PATTERN)
        start_times = {}       # packet_id -> time string of the packet's first event
        current_paths = {}     # packet_id -> list of nodes visited so far
        flow_to_delays = {}    # flow_id -> list of end-to-end delay samples
        flow_to_details = {}   # flow_id -> FlowDetails (reference path recorded once)
        trace_file = open(trace_file_name, 'r')
        try:
            for line_index, line in enumerate(trace_file):
                logging.debug(line)
                if line_index % 100000 == 0:
                    logging.debug("Parsed %d lines so far", line_index)
                match_result = self.trace_pattern.search(line)
                if match_result is None:
                    continue  # skip blank/malformed lines instead of crashing
                sample_time = match_result.group('sample_time')
                packet_id = int(match_result.group('packet_id'))
                from_node = int(match_result.group('from_node'))

                # First time the packet is seen - remember its start time and
                # begin recording its path at the originating node.
                if packet_id not in start_times:
                    start_times[packet_id] = sample_time
                    current_paths[packet_id] = [from_node]
                    continue

                event_type = match_result.group('event_type')
                target_addr = int(match_result.group('target_addr'))
                flow_id = int(match_result.group('flow_id'))
                to_node = int(match_result.group('to_node'))

                if event_type == 'r':  # received at a node - extend the path
                    path = current_paths[packet_id]
                    path.append(to_node)
                    if to_node == target_addr:  # packet reached its target
                        delay = float(sample_time) - float(start_times[packet_id])

                        # The first packet of the flow to arrive defines the
                        # flow's reference path.
                        if flow_id not in flow_to_details:
                            source_addr = int(match_result.group('source_addr'))
                            flow_details = FlowDetails(flow_id, source_addr, target_addr)
                            flow_details.path = path
                            flow_to_details[flow_id] = flow_details
                            flow_to_delays[flow_id] = []
                        flow_to_delays[flow_id].append(delay)

                        # Assert that the flow uses only a single path.
                        if path != flow_to_details[flow_id].path:
                            raise ValueError("More than a single path to a flow. Received %s but already seen %s" % (path, flow_to_details[flow_id].path))
                elif event_type == 'd':  # packet dropped - forget its bookkeeping
                    # pop with a default so a duplicate drop record cannot
                    # raise KeyError
                    start_times.pop(packet_id, None)
                    current_paths.pop(packet_id, None)
        finally:
            trace_file.close()

        # Update average delay and packet count in each flow's details.
        for flow_id, values in flow_to_delays.items():
            details = flow_to_details[flow_id]
            packet_count = len(values)
            details.setPacketCount(packet_count)
            details.setDelay(sum(values, 0.0) / packet_count)

        return flow_to_details

    def write_delays_to_file(self, flow_to_details, file_name):
        '''
        Write one human-readable line per flow to file_name:
        "<flow_id>: <source>-><target> <avg_delay> (<packet_count>)".
        '''
        logging.info("Exporting delays %s", file_name)
        with open(file_name, 'w') as output_file:
            for flow_id, flow_details in flow_to_details.items():
                # %2.8f: the original "%f2.8" printed the delay followed by a
                # literal "2.8" - width.precision must precede the 'f'.
                output_file.write("%s: %s->%s %2.8f (%d)\n" %
                                  (flow_id, flow_details.source, flow_details.target,
                                   flow_details.delay, flow_details.packet_count))

    def save_flow_to_details(self, flow_to_details, file_name):
        '''
        Write a flow_to_details dictionary to file_name as CSV, one
        "flow_id,source,target,delay,packet_count" line per flow.
        '''
        with open(file_name, 'w') as out_file:
            for flow_id, flow_details in flow_to_details.items():
                out_file.write("%d,%d,%d,%f,%d\n" % (flow_id, flow_details.source,
                                                     flow_details.target,
                                                     flow_details.delay,
                                                     flow_details.packet_count))

    def load_flow_to_details(self, file_name):
        '''
        Load a flow_to_details dictionary from a CSV file previously written
        by save_flow_to_details. Returns dict flow_id -> FlowDetails.
        '''
        flow_to_details = {}
        with open(file_name, 'r') as in_file:
            for line in in_file:
                parts = line.split(',')
                flow_id = int(parts[0].strip())
                source_node = int(parts[1].strip())
                destination_node = int(parts[2].strip())
                flow_details = FlowDetails(flow_id, source_node, destination_node)
                flow_details.delay = float(parts[3].strip())
                flow_details.packet_count = int(parts[4].strip())
                flow_to_details[flow_id] = flow_details
        return flow_to_details
        
        
if __name__ == '__main__':
    # Script entry point: turn on verbose logging and build a parser.
    logging.basicConfig(level=logging.DEBUG)
    trace_parser = TraceParser()