/*
 * Composition Knowledge Miner. A data mashup composition knowledge miner
 * Copyright (C) 2011 Carlos Rodriguez
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 */

package eu.ict_omelette.mashup.compositionminer;

import eu.ict_omelette.mashup.canonicalmodel.CanonicalMashup;
import eu.ict_omelette.mashup.canonicalmodel.Dataflow;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;

/**
 *
 * @author carlos
 */
public class Util {

    /**
     * Builds the canonical hash string for a dataflow, in the form
     * {@code "fromComponent.fromPort=>toComponent.toPort"}. Both public
     * methods below key their maps on this representation, so it is
     * centralized here to keep them consistent.
     *
     * @param df the dataflow to hash
     * @return the hash string identifying the connector
     */
    private static String hashDataflow(Dataflow df) {
        return df.getFromComponent().getType() + "." + df.getFromPort()
                + "=>" + df.getToComponent().getType() + "." + df.getToPort();
    }

    /**
     * Mines the repository of canonical mashup compositions for recurrent
     * connectors (dataflows) whose support — the fraction of compositions
     * that contain the connector at least once — reaches {@code minSupp}.
     *
     * @param compRepo the repository of canonical mashup compositions
     * @param minSupp  minimum support threshold, normalized over the total
     *                 number of compositions (e.g. 0.1 = 10% of compositions)
     * @return a map whose keys are connector hashes of the form
     *         {@code "componentX.attr1=>componentY.attr2"} and whose values
     *         are the lists of pipe ids containing that connector (one entry
     *         per occurrence, so a pipe may appear more than once)
     */
    public static LinkedHashMap getRecurrentConnectors(LinkedList<CanonicalMashup> compRepo, Double minSupp){

        // connector hash -> number of DISTINCT compositions containing it
        LinkedHashMap<String, Double> suppHash = new LinkedHashMap<String, Double>();
        // connector hash -> pipe ids, restricted to connectors meeting minSupp
        LinkedHashMap<String, LinkedList> resultHash = new LinkedHashMap<String, LinkedList>();
        // connector hash -> pipe ids, over ALL connectors (one entry per occurrence)
        LinkedHashMap<String, LinkedList> globalItemset = new LinkedHashMap<String, LinkedList>();

        for (CanonicalMashup canModel : compRepo) {
            Iterator<Dataflow> dfIt = canModel.getDataFlows();
            // Set of UNIQUE dataflows of the current composition, used below to
            // count support (each composition contributes at most 1 per connector).
            LinkedHashMap<String, String> hashUniqDfs = new LinkedHashMap<String, String>();

            // NOTE(review): pipe ids are stored with this dataset-specific path
            // prefix stripped; consider parameterizing if other datasets are used.
            String pipeId = canModel.getId().replace("/tmp/sport_news/pipe_", "");

            while (dfIt.hasNext()) {
                Dataflow df = dfIt.next();
                String strHashDf = hashDataflow(df);

                hashUniqDfs.put(strHashDf, strHashDf);

                // Record every occurrence of the connector; create the list lazily.
                LinkedList ll = globalItemset.get(strHashDf);
                if (ll == null) {
                    ll = new LinkedList();
                    globalItemset.put(strHashDf, ll);
                }
                ll.add(pipeId);
            }

            // Increment the support counter once per connector per composition.
            for (String key : hashUniqDfs.keySet()) {
                Double supp = suppHash.get(key);
                suppHash.put(key, supp == null ? Double.valueOf(1.0) : supp + 1.0);
            }
        }

        // Keep only the connectors whose normalized support reaches the threshold.
        for (String key : suppHash.keySet()) {
            double ratio = suppHash.get(key) / compRepo.size();
            if (ratio >= minSupp) {
                resultHash.put(key, globalItemset.get(key));
            }
        }

        return resultHash;
    }

    /**
     * Collects all {@link Dataflow} instances from the repository that match
     * one of the given recurrent connectors.
     *
     * @param compRepo the repository of canonical mashup compositions
     * @param recConn  recurrent connectors keyed by connector hash, as
     *                 returned by {@link #getRecurrentConnectors}
     * @return every dataflow occurrence whose connector hash is a key of
     *         {@code recConn}, in repository iteration order
     */
    public static LinkedList<Dataflow> getConnectorInstances(LinkedList<CanonicalMashup> compRepo, LinkedHashMap recConn){
        LinkedList<Dataflow> db = new LinkedList<Dataflow>();

        for (CanonicalMashup cm : compRepo) {
            Iterator<Dataflow> dfIt = cm.getDataFlows();
            while (dfIt.hasNext()) {
                Dataflow df = dfIt.next();
                // containsKey replaces the original linear scan over the key set:
                // keys are unique, so the behavior (at most one match) is identical.
                if (recConn.containsKey(hashDataflow(df))) {
                    db.add(df);
                }
            }
        }

        return db;
    }

}
