/**
 *
 *  Util.Plot.Analyzer, V0.7
 *
 *  @author Michael Liljenstam
 *
 */

package SSF.Util.Plot;

import SSF.OS.NetFlow.*;
import SSF.Util.Streams.*;
import SSF.OS.NetFlow.Filter.*;
import com.renesys.raceway.DML.*;

import java.math.*;

/**
 *  Analyzes the raw binary data streams from the simulation and
 *  bins it into time-bins. Queue monitor data is already binned but 
 *  may be aggregated into larger bins. The raw flow data, on the other 
 *  hand, is binned by computing the fraction of the (continuous) flow 
 *  covering each bin. The result is that the time-bin holds the number 
 *  of active flows at each time point. <br><br>
 *
 *  Extends the FilterPlayer class to override some of its methods,
 *  primarily receive() and configFilters(). Changes the receive()
 *  behavior, but also includes multiple filters instead of the
 *  single filter in FilterPlayer. This permits multiple datasets 
 *  to be generated from a single pass through the data file.
 *
 *  $Id: Analyzer.java,v 1.12 2003/02/13 19:31:32 mili Exp $
 */

public class Analyzer extends FilterPlayer {

    /*
     *  Constants
     */

    // field type enumeration; some record types carry multiple fields,
    // and the selector picks which one feeds the time-series
    public static final char UNSPEC           = 0;
    public static final char QREC_LEN         = 1; // queue length
    public static final char QREC_PKTCOUNT    = 2; // cumulative packet count
    public static final char QREC_PKTDROPCNT  = 3; // cumulative drop count
    public static final char QREC_PKTDIFF     = 4; // per-bin packet differential
    public static final char QREC_PKTDROPDIFF = 5; // per-bin drop differential

    // corresponding symbolic names (currently unused)
    /*
      public static final String[] FIELD_SYMBS = 
      {"unspec", "qlen", "pktcnt", "pktdropcnt", "pktdiff", 
      "pktdropdiff"};
    */

    ///////////////////////////////////////////////////////////////////
    ////                     private variables                     ////

    /**  Number of datasets (one per filter argument). */
    private int    _numDatasets;

    /**  Name of the raw binary data file to read from. */
    private String _fileName;

    /**  Time-bin size (seconds) in the time-series histogram. */
    private float  _binSize;


    /**  Time series of measurements. "Vector of vectors" with one
         time-series per dataset (one dataset per filter). */
    private SelfExpandingFloatArray[] _timeBins;
    /**  Counters for each time-bin, for running mean.
         NOTE(review): only referenced by disabled running-mean code in
         logRecord(); kept for when that code is re-enabled. */
    private SelfExpandingIntArray[]   _timeBinCounts;

    /**  Multiple filters instead of FilterPlayer's single filter. */
    private Filter[] _filters;

    /**  Field selector (a QREC_* constant) for each filter/dataset. */
    private char[]   _fieldSelectors;


    /**  Running statistics: previous value per dataset (for *DIFF fields). */
    private int[]     _prevValues;
    /**  Running statistics: current maximum per dataset (for *DIFF fields). */
    private int[]     _currentMaxs;
    /**  Running statistics: previous bin number per dataset. */
    private int[]     _prevBinNums;

    // attributes for consistency-checking bin-size against the probe-intervals.

    /**  Probe interval per dataset; elements initialized to 1. */
    private int[]     _probeIntervals;
    /**  Exponent for common order of magnitude of probe intervals. */
    private int       _freqExponent;
    /**  First probe time per dataset, used to measure the interval. */
    private int[]     _prevProbeTime;
    /**  Number of records seen per probe, per dataset. */
    private int[]     _numRecForProbe;

    /**  Number of probes per bin, per dataset (accumulated from
         QueueProbeIntRecords in checkProbeInterval()). */
    private int[]     _probesPerBin;

    // attributes for consistency checking record types within dataset

    /**  Record type id first seen in each dataset. */
    private int[]     _recordTypeIdsFound;
    /**  Whether any record has been seen for each dataset. */
    private boolean[] _recordFoundForSet;

    // user feedback attributes

    /**  Flags tracking which warning messages have already been shown,
         indexed by the WARN_* constants below. */
    private boolean[] _warningDisplayedFlags;
    // warning types enum
    private static final char WARN_NO_FIELD_SPEC = 0;
    private static final char WARN_MULT_REC_TYPES = 1;
    private static final char WARN_BIN_SIZE      = 2;


    /** Counter for number of records read. For progress indication. */
    private long      _recordCounter = 0;


    ///////////////////////////////////////////////////////////////////
    ////                     constructors & methods                ////


    /**
     *  Constructs an analyzer object for streamID that reads
     *  numDatasets datasets from fileName.  The time-bin size is
     *  supplied later, in binToArray().
     *
     *  @param numDatasets number of datasets (filters) to produce
     *  @param fileName    name of the raw binary data file
     *  @param streamID    stream identifier passed up to FilterPlayer
     */
    public Analyzer(int numDatasets, String fileName, String streamID)
    {
	super(streamID);
	_numDatasets = numDatasets;
	_fileName = fileName;

	// set up per-dataset time-series histograms
	_timeBins = new SelfExpandingFloatArray[_numDatasets];
	_timeBinCounts = new SelfExpandingIntArray[_numDatasets];
	for (int i = 0; i < _numDatasets; i++) {
	    _timeBins[i] = new SelfExpandingFloatArray();
	    _timeBinCounts[i] = new SelfExpandingIntArray();
	}

	// init warning display flags; three warning types (WARN_*)
	// (the old comment said "two", but WARN_BIN_SIZE makes three)
	_warningDisplayedFlags = new boolean[3];

	// running-statistics attributes: Java guarantees new numeric
	// arrays start zeroed, so no explicit element init is needed
	_prevValues = new int[_numDatasets];
	_currentMaxs = new int[_numDatasets];
	_prevBinNums = new int[_numDatasets];

	// probe-interval consistency-check attributes
	_probeIntervals = new int[_numDatasets];
	_prevProbeTime = new int[_numDatasets];
	_numRecForProbe = new int[_numDatasets];
	_probesPerBin = new int[_numDatasets];
	for (int i = 0; i < _numDatasets; i++) {
	    _probeIntervals[i] = 1; // intervals default to 1, not 0
	}
	_freqExponent = 0;

	// record-type consistency-check attributes
	_recordTypeIdsFound = new int[_numDatasets];
	_recordFoundForSet = new boolean[_numDatasets];
    }


    /**
     *  Bin data from the stream into the given array using the given
     *  bin size.  Reads the whole file (receive() is called back per
     *  record), converts queue sums to means, then pads all datasets
     *  to equal length and copies them into dataArray.
     *
     *  @param dataArray output: one row per dataset
     *  @param binSize   time-bin size in seconds
     *  @throws streamException on stream read/format errors
     */
    public void binToArray(Object[][] dataArray, float binSize) 
	throws streamException {

	_binSize = binSize;

	// read file and analyze
	connectRead(_fileName);
	wrapUp();

	// calc mean from stored sums for queue datasets
	calcQueueMeans();

	// find the longest histogram to pad all others to same length
	int maxLen = 0;
	for (int i = 0; i < _numDatasets; i++) {
	    if (_timeBins[i].size() > maxLen)
		maxLen = _timeBins[i].size();

	    // also sanity check... while we're at it
	    if (_timeBins[i].size() == 0) {
		System.err.println("Warning: Empty dataset! (for filter argument number " + (i + 1) + ")");
	    }
	}

	// create array from histograms
	for (int i = 0; i < _numDatasets; i++) {
	    // pad: writing element maxLen-1 expands the array to maxLen
	    if (_timeBins[i].size() < maxLen)
		_timeBins[i].update(maxLen - 1, 0); // set elem to zero

	    // create array row
	    dataArray[i] = _timeBins[i].toArray();
	}
    }


    ///////////////////////////////////////////////////////////////////
    ////                     Internal methods                      ////



    /**
     *  Assign multiple filters to this "Player"/Analyzer.
     *
     *  @param pFilters one filter per dataset
     */
    public void assignFilters(Filter[] pFilters)
    {
	_filters = pFilters;
    }

    /**
     *  Assign a field selector (QREC_* constant) per filter.
     *
     *  @param fieldSelectors one selector per dataset
     */
    public void assignFieldSelectors(char[] fieldSelectors)
    {
	_fieldSelectors = fieldSelectors;
    }


    /**
     *  Configure the filters given the names of DML files that contain
     *  the filter configuration information (one file per filter).
     *  Exits the program on configuration errors.
     *
     *  @param configFileNames one DML file name per filter
     */
    public void configFilters(String[] configFileNames)
    {
	Filter[] pFilter = new Filter[configFileNames.length];

	for (int i = 0; i < configFileNames.length; i++) {

	    dmlConfig filterConfig = new dmlConfig(configFileNames[i]);

	    Configuration fConfig = (Configuration)filterConfig.findSingle(".filter");
	    if (fConfig == null) {
		System.err.println("Configuration of filter must define a .filter attribute");
		// BUGFIX: previously printed fConfig, which is always
		// null in this branch; name the offending file instead
		System.err.println("  (in configuration file: " + configFileNames[i] + ")");
		System.exit(-1);
	    }

	    try {
		pFilter[i] = new Filter(this, fConfig);
	    }
	    catch (configException cfe) {
		System.err.println(cfe.getMessage());
		System.exit(-1);
	    }
	}

	//for debug
	System.err.println("Completed configuring the filter(s)");

	assignFilters(pFilter);
    }

    /**
     *  Log a data record into the given dataset's time-bins. <br>
     *  Flow records increment every bin spanning the flow's lifetime,
     *  weighted by the fraction of the bin the flow covers, so a bin
     *  ends up holding the number of active flows at that time point.
     *  Queue records add the selected field's value to the bin's sum
     *  (converted to a mean later by calcQueueMeans()). <br>
     *  Time bin definition: for binSize=1.0, bin zero will span the
     *  interval [0,1), bin 1 = [1, 2), and so on...
     *
     *  NOTE(review): the offset parameter is unused -- buf is always
     *  decoded from index 0.  Presumably records are delivered at
     *  offset 0; confirm against the stream reader.
     *
     *  @return 0 on success
     */
    private int logRecord(int dataSet, int tid, int sid, double time, 
			  byte[] buf, int offset, int length) 
    {
	IpNetFlow flow;

	if (getRecordTypeString(tid).equals("SSF.OS.NetFlow")) {

	    // IP net flow data

	    flow = new IpNetFlow();
	    flow.readFromBytes(buf, length);

	    // first and last bins touched by the flow
	    int startBin = (int) Math.floor(flow.First / _binSize);
	    int endBin = (int) Math.floor(flow.Last / _binSize); 

	    // a line-graph should start gradually during the first 
	    // interval, whereas a bargraph should start directly
	    /*		if (!barGraph)
			startBin++; */

	    // increment vector in interval start - end, using fractional
	    // weights for the partially covered boundary bins
	    for (int i = startBin; i <= endBin; i++) {
		if (i == startBin && i == endBin) {
		    // flow contained within a single bin: covered fraction
		    _timeBins[dataSet].
			update(i, +((flow.Last - flow.First)/_binSize));
		} else if (i == startBin) {
		    // fraction of the start bin after the flow begins
		    _timeBins[dataSet].
			update(i, +(((startBin+1)*_binSize - flow.First))/_binSize);
		} else if (i == endBin) {
		    // fraction of the end bin before the flow finishes
		    _timeBins[dataSet].
			update(i, +((flow.Last - endBin*_binSize))/_binSize);
		} else {
		    // interior bin: fully covered, increment by one
		    _timeBins[dataSet].
			update(i, +1);
		}
	    }

	    return 0;
	} // end flow record alternative
	else if (getRecordTypeString(tid).equals("SSF.Net.QueueRecord_1")) { // SSFNet 1.2.1

	    // Queue statistics data: float qlen followed by two int counters
	    float qLength = BytesUtil.bytesToFloat(buf, 0);
	    int pktCount = BytesUtil.bytesToInt(buf, 4);
	    int pktDropCount = BytesUtil.bytesToInt(buf, 8);

	    int binNum = (int) Math.floor(time / _binSize);
	    float value;
	    if (_fieldSelectors == null) {
		// no selectors assigned at all: default to queue length
		if (!_warningDisplayedFlags[WARN_NO_FIELD_SPEC]) {
		    System.err.println("Warning no field specified. Using default: qlen");
		    _warningDisplayedFlags[WARN_NO_FIELD_SPEC] = true;
		}
		value = qLength;
	    } else {
		switch (_fieldSelectors[dataSet]) {
		case QREC_LEN:
		    value = qLength;
		    break;
		case QREC_PKTCOUNT:
		    value = pktCount;
		    break;
		case QREC_PKTDROPCNT:
		    value = pktDropCount;
		    break;
		case QREC_PKTDIFF:
		    // differential against the max seen in the previous bin
		    value = pktCount - _prevValues[dataSet];
		    if (binNum > _prevBinNums[dataSet]) { // moved to next bin?
			_prevValues[dataSet] = _currentMaxs[dataSet];
			_currentMaxs[dataSet] = 0;
			_prevBinNums[dataSet]++;
		    }
		    _currentMaxs[dataSet] = Math.max(_currentMaxs[dataSet],
						     pktCount);
		    break;
		case QREC_PKTDROPDIFF:
		    // differential against the max seen in the previous bin
		    value = pktDropCount - _prevValues[dataSet];
		    if (binNum > _prevBinNums[dataSet]) { // moved to next bin?
			_prevValues[dataSet] = _currentMaxs[dataSet];
			_currentMaxs[dataSet] = 0;
			_prevBinNums[dataSet]++;
		    }
		    _currentMaxs[dataSet] = Math.max(_currentMaxs[dataSet],
						     pktDropCount);
		    break;
		default:
		    // UNSPEC or unknown selector: default to queue length
		    if (!_warningDisplayedFlags[WARN_NO_FIELD_SPEC]) {
			System.err.println("Warning no field specified. Using default: qlen");
			_warningDisplayedFlags[WARN_NO_FIELD_SPEC] = true;
		    }
		    value = qLength;
		} // end field selector switch
	    } // end queue record alternative

 	    /*
	    // compute running mean

	    int numSamples = _timeBinCounts[dataSet].getVal(binNum);
	    float newMean = value; // if first sample, otherwise 
	    // updated later

	    if (numSamples > 0) { // not first sample
		float prevVal = _timeBins[dataSet].getVal(binNum);
		numSamples++;
		newMean = 
		    (prevVal * ((float) (numSamples-1) / numSamples)) 
		    + (value / numSamples);
	    }
	    // update mean
	    if (newMean > 0) // truncate to the right, if no data
		_timeBins[dataSet].set(binNum, newMean); // set element
	    // update sample counter
	    _timeBinCounts[dataSet].update(binNum, +1);
 	    */

	    // accumulate into the bin's sum; calcQueueMeans() divides
	    // by the probe count afterwards
	    float tmpVal = _timeBins[dataSet].getVal(binNum);
	    tmpVal += value;
	    _timeBins[dataSet].set(binNum, tmpVal); // set element

	    return 0;
	}
	else {
	    System.err.println("Error: Unrecognized record type in file.");
	    System.exit(-1);
	    // not reached (exit above); keeps the compiler satisfied and
	    // defers to the superclass should the exit ever be removed
	    return super.receive(tid, sid, time, buf, offset, length);
	}
    }


    /**
     *  Process one accepted record for one dataset.  Queue probe
     *  interval records feed the probe-interval consistency check and
     *  "do not count" as data records; all other records are logged,
     *  subject to the one-record-type-per-dataset consistency check.
     *
     *  @return the record's return code contribution (0 on success)
     */
    private int processForDataset(int dataSet, int tid, int sid, double time,
				  byte[] buf, int offset, int length)
    {
	if (getRecordTypeString(tid).equals("SSF.Net.QueueProbeIntRecord")) {
	    double probeInt = BytesUtil.bytesToFloat(buf, 0);
	    checkProbeInterval(dataSet, probeInt);
	    return 0;
	}

	if (_recordFoundForSet[dataSet] == false
	    || tid == _recordTypeIdsFound[dataSet]) {

	    int retval = logRecord(dataSet, tid, sid, time, buf, offset, length);

	    _recordFoundForSet[dataSet] = true;
	    _recordTypeIdsFound[dataSet] = tid;
	    return retval;
	}

	// second record type in the same dataset: warn once, skip record
	printRecTypeWarning(getRecordTypeString(_recordTypeIdsFound[dataSet]));
	return 0;
    }


    /**
     *  Callback method for action to do for each read record.
     *  Check each read record against each specified dataset. If a filter has 
     *  been selected for the dataset first check if the record passes the 
     *  filter. Then intercept queue probe interval records. These should just 
     *  be collected for each dataset, but "do not count" as data records.
     *  Thus, two different types of data records for the same dataset is an 
     *  error, but a probe interval record is not an error in conjunction 
     *  with queue data records. On the contrary, it is required for averaging 
     *  of queue length data.
     */
    public int receive(int tid, int sid, double time, 
		       byte[] buf, int offset, int length)
    {
	FilterData fdata = 
	    new FilterData(tid, sid, time, buf);

	int    retval = 0;

	/** FIXME: not sure it's ok to accumulate the return values here.
	    but it seems that 0 should be normal return code. Assuming 
	    that the rest is abnormal. 
	*/

	for (int i = 0; i < _numDatasets; i++) {
	    // a dataset without a filter accepts every record; otherwise
	    // the record must pass the dataset's filter (short-circuit
	    // preserves the original two-branch behavior exactly)
	    if (_filters == null || i >= _filters.length
		|| _filters[i].passFilter(fdata)) {
		retval += processForDataset(i, tid, sid, time,
					    buf, offset, length);
	    }
	}

	// progress indication
	_recordCounter++;
	if ((_recordCounter % 1000) == 0) {	// every 1000 records
	    System.err.print(".");
	}

	return retval;
    }

    /**
     * addType
     * copied from FilterPlayer and modified to handle multiple filters.
     * 
     * @param spec the string generated from the record, expected as
     *             "&lt;id&gt; &lt;name&gt;"
     * @throws streamException if spec has no space-separated id prefix
     */
    protected void addType(String spec) 
	throws streamException
    {
	int sp = spec.indexOf(" ");
	if (sp > 0) {
	    // Integer.valueOf instead of the deprecated new Integer(String)
	    Integer I = Integer.valueOf(spec.substring(0, sp));
	    String N = spec.substring(sp + 1).trim();
	    typeDictionary.put(I, N);
	    typeDictionary.put(N, I);

	    //put in decoder for the filter
	    //if this is not a record type, no decoder will be added.
	    if (_filters != null)
		for (int i = 0; i < _filters.length; i++)
		    _filters[i].addDecoder(N, I);
	} 
	else throw new streamException("Bad code: " + spec);
    }

    /**
     *  Compute means for queue statistics for each bin based on 
     *  sum & number of probes.  Only queue-length (qlen) datasets are
     *  averaged; the other fields keep their per-bin sums.
     */
    private void calcQueueMeans()
    {
	for (int i = 0; i < _numDatasets; i++) {

	    // sanity check
	    if (!_recordFoundForSet[i]) {
		System.err.println("Warning: Empty dataset! (for filter argument number " + (i + 1) + ")");
		continue;
	    }

	    // BUGFIX: a null _fieldSelectors used to throw a
	    // NullPointerException here, although logRecord() treats
	    // null (and UNSPEC) selectors as "default to qlen".  Use
	    // the same default so the logged qlen sums get averaged.
	    boolean usesQlen = (_fieldSelectors == null
				|| _fieldSelectors[i] == QREC_LEN
				|| _fieldSelectors[i] == UNSPEC);

	    if (getRecordTypeString(_recordTypeIdsFound[i]).equals("SSF.Net.QueueRecord_1")
		&& usesQlen) {

		if (_probesPerBin[i] > 0) {
		    System.err.println("dataset " + i + " probes/bin: " 
				       + _probesPerBin[i]);
		} else {
		    System.err.println("Error: Unknown number of samples per bin for queue length data.");
		    System.err.println("       No QueueProbeIntRecord found for dataset.");
		    System.exit(-1);
		}

		// divide each bin's sum by the number of probes per bin
		for (int j = 0; j < _timeBins[i].size(); j++) {
		    float tmpVal = _timeBins[i].getVal(j);
		    tmpVal /= _probesPerBin[i];
		    _timeBins[i].set(j, tmpVal); // set element
		}
	    }
	}
    }


    /**
     *  Wrap-up. Just terminate progress printout.
     */
    public void wrapUp()
    {
	System.err.println("");
    }

    /**
     *  Consistency check binSize in relation to probe intervals.
     *  Also accumulates the number of probes per bin used later by
     *  calcQueueMeans().  Exits if the bin size is smaller than the
     *  probe resolution.
     *
     *  @param dataSet dataset index
     *  @param time    probe interval in seconds
     */
    private void checkProbeInterval(int dataSet, double time)
    {
	_probesPerBin[dataSet] += (int) Math.round(_binSize / time);

	// first simple check
	float tm = (float) time;
	if (_binSize < tm) {
	    System.err.println("Error: bin size (" + 
			       _binSize + 
			       " s) smaller than NIC probe resolution (" +
			       tm + " s)");
	    System.exit(1);
	}

	/** FIXME : disabling test since it's not finished

	_numRecForProbe[dataSet]++; // update record counter

  	if (_numRecForProbe[dataSet] == 1) { // first record?
	    updateProbeIntervalExponent(time);
	    int normProbeInt = (int) (time / Math.pow(10, _freqExponent));
	    _probeIntervals[dataSet] = normProbeInt;
	}
  	if (_numRecForProbe[dataSet] > 1) { // later record?

	    int newProbeTime = (int) Math.round(time / Math.pow(10, _freqExponent));

	    if (_probeIntervals[dataSet] != 
		    newProbeTime - _prevProbeTime[dataSet]) {
		if (!_warningDisplayedFlags[WARN_BIN_SIZE]) {
		    System.err.println("WARNING! Aperiodic probe intervals. Found " + (newProbeTime - _prevProbeTime[dataSet]) + ", prev interval " + _probeIntervals[dataSet] + " (e" + _freqExponent + ")");
		    System.err.println("Probable inconsistency in dataset selection.");
		    _warningDisplayedFlags[WARN_BIN_SIZE] = true;
		}
	    }
	}
    	if (_numRecForProbe[dataSet] == 1) { // first record?
	    updateProbeIntervalExponent(time);
	}
	// store time
	_prevProbeTime[dataSet] = (int) Math.round(time / Math.pow(10, _freqExponent));

	*/
    }

    /**
     *  Find common order-of-magnitude (base-10 exponent) for probe
     *  intervals.
     *
     *  @param time probe interval in seconds
     */
    private void updateProbeIntervalExponent(double time)
    {
	_freqExponent = (int) Math.floor(Math.log(time) / Math.log(10));

	System.err.println("set exp ** time =" + time +
			   ", freqexp=" + _freqExponent);
    }


   /**
    *  Print record type warning message (at most once per run).
    *
    *  @param recTypeUsed name of the record type the dataset keeps using
    */
    private void printRecTypeWarning(String recTypeUsed)
    {
	if (!_warningDisplayedFlags[WARN_MULT_REC_TYPES]) {
	    System.err.println("Warning! Dataset contains multiple record types.");
	    System.err.println("Using first found: " + recTypeUsed);
	    _warningDisplayedFlags[WARN_MULT_REC_TYPES] = true;
	}
    }
}


/*=                                                                      =*/
/*=  Copyright (c) 1997--2000  SSF Research Network                      =*/
/*=                                                                      =*/
/*=  SSFNet is open source software, distributed under the GNU General   =*/
/*=  Public License.  See the file COPYING in the 'doc' subdirectory of  =*/
/*=  the SSFNet distribution, or http://www.fsf.org/copyleft/gpl.html    =*/
/*=                                                                      =*/
