package br.pucrio.inf.lac.konoha.hyuga.command;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Deque;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.LinkedBlockingDeque;

import org.apache.log4j.Logger;

import br.pucrio.inf.lac.konoha.hyuga.common.Pair;
import br.pucrio.inf.lac.konoha.hyuga.core.Output;
import br.pucrio.inf.lac.konoha.hyuga.core.output.DG2CEPEntry;
import br.pucrio.inf.lac.konoha.hyuga.core.output.OutputEntry;
import br.pucrio.inf.lac.konoha.hyuga.core.output.OutputType;

import com.espertech.esper.client.EPServiceProvider;

/**
 * Command that dumps the DG2CEP output queue to two CSV files:
 * a raw, timestamp-rebased dump ({@code <fname>.csv}) and an "enriched"
 * per-second snapshot of the currently-alive clusters
 * ({@code <fname>.enriched.csv}).
 *
 * <p>Command string format (see {@link #execute()}):
 * {@code "<cmd> <basePeriod> <endPeriod>"}, periods in milliseconds.
 */
public class LogCommand extends Command {

    // Logger (final: it is a per-class constant)
    private static final Logger LOGGER = Logger.getLogger(LogCommand.class.getCanonicalName());

    // Output Files, named after the eps/minPts (and optional cell-subdivision) parameters
    private File outputFile;
    private File enrichedOutputFile;

    /**
     * Builds the command and eagerly creates the two CSV output files.
     *
     * @param commandSTR raw command string ("<cmd> <basePeriod> <endPeriod>")
     * @param properties must contain "eps" and "minPts"; optionally "cellsub",
     *                   "cellminminpts" and "cellsubdivision" for the subdivision variant
     * @param cepEngine  Esper engine handle, forwarded to {@link Command}
     */
    public LogCommand(String commandSTR, Properties properties, EPServiceProvider cepEngine) {
        super(commandSTR, cepEngine);

        String eps    = properties.getProperty("eps");
        String minPts = properties.getProperty("minPts");
        String fname  = "dg2cep" + eps + "x" + minPts;

        if (properties.containsKey("cellsub") && Boolean.parseBoolean(properties.getProperty("cellsub"))) {
            String minminPts = properties.getProperty("cellminminpts");
            String subdivnum = properties.getProperty("cellsubdivision");

            fname += "x" + minminPts + "x" + subdivnum;
        }

        outputFile         = new File(fname + ".csv");
        enrichedOutputFile = new File(fname + ".enriched.csv");

        try {
            outputFile.createNewFile();
            enrichedOutputFile.createNewFile();
        } catch (IOException ex) {
            // BUG FIX: log the cause instead of swallowing it (the original logged only a message).
            LOGGER.error("Could not create file: " + outputFile.getName()
                       + " / " + enrichedOutputFile.getName(), ex);
        }
    }

    /**
     * Parses base/end periods from {@code commandSTR}, snapshots the output
     * queue, and writes the raw and enriched CSV dumps. Any clusters still
     * alive at the end are printed to stdout for debugging.
     */
    @Override
    public void execute() {
        String[] tokens  = commandSTR.split(" ");
        long basePeriod  = Long.parseLong(tokens[1]);
        long endPeriod   = Long.parseLong(tokens[2]);
        Output output    = Output.getInstance();

        // Snapshot copy of the engine's output queue; both passes below iterate it.
        Deque<OutputEntry> outputQueue = new LinkedBlockingDeque<OutputEntry>(output.getOutputQueue());

        // Pass 1 MUST run first: it rebases the entries' timestamps in place,
        // and pass 2 relies on the rebased values.
        dumpRebasedQueue(outputQueue, basePeriod, endPeriod);
        Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters =
                dumpEnrichedQueue(outputQueue, basePeriod, endPeriod);

        // Debug dump of clusters still alive after the end period.
        System.out.println("--------");
        for (DG2CEPEntry entry : currentClusters.values()) {
            System.out.println(entry.getCentroidSTR());
        }
        System.out.println("--------");
        for (DG2CEPEntry entry : currentClusters.values()) {
            System.out.println(entry);
        }
    }

    /**
     * Pass 1: rebases each entry's timestamp (base + relative detection time)
     * in place and writes every entry up to {@code endPeriod} to {@link #outputFile}.
     * NOTE: entries after the first one past {@code endPeriod} are left un-rebased,
     * matching the original behavior.
     */
    private void dumpRebasedQueue(Deque<OutputEntry> outputQueue, long basePeriod, long endPeriod) {
        // BUG FIX: try-with-resources. The original flushed/closed a possibly-null
        // writer in finally, throwing NPE if the FileWriter failed to open.
        try (PrintWriter outputPW = new PrintWriter(new BufferedWriter(new FileWriter(outputFile)))) {
            // Header
            outputPW.println("timestamp,type,cells,size,centroid,nodes");
            System.out.println("timestamp,type,cells,size,centroid,nodes");

            for (OutputEntry entry : outputQueue) {
                entry.setTimestamp(basePeriod + entry.getTimestamp());

                outputPW.println(entry);
                System.out.println(entry);

                if (entry.getTimestamp() > endPeriod) {
                    break;
                }
            }
        } catch (Exception ex) {
            LOGGER.error("Error at dumping output queue to file " + outputFile.getName(), ex);
        }
    }

    /**
     * Pass 2: walks the (rebased) queue second by second, applying each DG2CEP
     * event to a rolling snapshot of alive clusters, and writes one line per
     * second to {@link #enrichedOutputFile}.
     *
     * @return the clusters still alive after {@code endPeriod}
     */
    private Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> dumpEnrichedQueue(
            Deque<OutputEntry> outputQueue, long basePeriod, long endPeriod) {
        Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters = new HashMap<>();

        // BUG FIX: try-with-resources (see dumpRebasedQueue) + exception cause logged.
        try (PrintWriter enrichedPW = new PrintWriter(new BufferedWriter(new FileWriter(enrichedOutputFile)))) {
            // Header
            enrichedPW.println("timestamp,type,cells,size,centroid,nodes");

            Iterator<OutputEntry> it = outputQueue.iterator();
            // Throws NoSuchElementException on an empty queue — caught below,
            // same net effect as the original code.
            DG2CEPEntry entry = (DG2CEPEntry) it.next();
            long currentMilliSecond = basePeriod;

            while (currentMilliSecond <= endPeriod) {
                long entryTimestamp = entry.getTimestamp();

                if (currentMilliSecond < entryTimestamp || !it.hasNext()) {
                    // No (more) events for this second: emit the current snapshot.
                    if (currentClusters.isEmpty()) {
                        enrichedPW.println(currentMilliSecond + ",NONE");
                        System.out.println(currentMilliSecond + ",NONE");
                    } else {
                        printCurrentClusters(currentMilliSecond, enrichedPW, currentClusters);
                    }
                    currentMilliSecond += 1000;
                } else {
                    applyEntry(entry, currentClusters);

                    if (it.hasNext()) {
                        try {
                            entry = (DG2CEPEntry) it.next();
                        } catch (Exception e) {
                            // Defensive guard kept from the original; now logged with cause.
                            LOGGER.warn("hasNext() cannot be trusted", e);
                        }
                    } else {
                        currentMilliSecond += 1000;
                    }
                }
            }
        } catch (Exception ex) {
            LOGGER.error("Error at dumping output queue to file " + enrichedOutputFile.getName(), ex);
        }

        return currentClusters;
    }

    /** Applies one DG2CEP event (disperse / add / update / merge) to the snapshot. */
    private void applyEntry(DG2CEPEntry entry, Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters) {
        if (entry.getType() == OutputType.DISPERSE) {
            // Disperse: drop this (or an overlapping version of this) cluster.
            if (containsCluster(entry, currentClusters)) {
                removeClusters(entry, currentClusters);
            }
        } else if (containsCluster(entry, currentClusters)) {
            // Add/update/merge over an existing overlapping cluster.
            updateClusters(entry, currentClusters);
        } else if (entry.getSize() != 0) {
            // Brand-new, non-empty cluster.
            currentClusters.put(entry.getClusterCells(), entry);
        }
    }

    /** Writes one line per alive cluster for the given second (or ",NONE" if empty). */
    private void printCurrentClusters(long currentMilliSecond, PrintWriter pw,
                                      Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters) {
        if (currentClusters.isEmpty()) {
            pw.println(currentMilliSecond + ",NONE");
            System.out.println(currentMilliSecond + ",NONE");
        } else {
            for (DG2CEPEntry entry : currentClusters.values()) {
                String line = entry.toString(currentMilliSecond);
                pw.println(line);
                System.out.println(line);
            }
        }
    }

    /** True when any cell of the entry's cluster overlaps a tracked cluster. */
    private boolean containsCluster(DG2CEPEntry dg2cepEntry,
                                    Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters) {
        for (Set<Pair<Integer,Integer>> tracked : currentClusters.keySet()) {
            for (Pair<Integer,Integer> cell : dg2cepEntry.getClusterCells()) {
                if (tracked.contains(cell)) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Removes every tracked cluster that shares at least one cell with the entry. */
    private void removeClusters(DG2CEPEntry dg2cepEntry,
                                Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters) {
        removeOverlapping(dg2cepEntry, currentClusters);
    }

    /**
     * Replaces every tracked cluster that overlaps the entry with the entry
     * itself. Empty entries (size 0) are ignored.
     */
    private void updateClusters(DG2CEPEntry dg2cepEntry,
                                Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters) {
        if (dg2cepEntry.getSize() == 0) {
            return;
        }

        removeOverlapping(dg2cepEntry, currentClusters);
        currentClusters.put(dg2cepEntry.getClusterCells(), dg2cepEntry);
    }

    /**
     * Shared overlap-removal loop (was duplicated in removeClusters/updateClusters).
     * Uses Iterator.remove so the map is safely mutated during iteration.
     */
    private void removeOverlapping(DG2CEPEntry dg2cepEntry,
                                   Map<Set<Pair<Integer,Integer>>, DG2CEPEntry> currentClusters) {
        Iterator<Set<Pair<Integer,Integer>>> it = currentClusters.keySet().iterator();
        while (it.hasNext()) {
            Set<Pair<Integer,Integer>> tracked = it.next();
            for (Pair<Integer,Integer> cell : dg2cepEntry.getClusterCells()) {
                if (tracked.contains(cell)) {
                    it.remove();
                    break;
                }
            }
        }
    }
}
