/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package edu.isi.pegasus.planner.subworkflow.partitioner.implementation;

import edu.isi.pegasus.planner.subworkflow.partitioner.BackwardPartitioner;
import edu.isi.pegasus.planner.subworkflow.partitioner.DAXAbstraction;
import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.dax.ADAG;
import edu.isi.pegasus.planner.dax.ParADAG;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Vector;

/**
 *
 * @author wchen
 */
/**
 * Backward partitioner (experiment run 7) that walks the workflow DAG from the
 * rear job towards the roots, grouping jobs into sub-workflow DAXes whose
 * accumulated data size stays under a per-site limit (the site catalog entry's
 * {@code SITESIZE} environment variable scaled by 1e8).
 *
 * NOTE(review): this class is experiment-specific -- the traversal strategy in
 * {@link #run()} is hard-wired to Montage transformation names ("mBgModel",
 * "mShrink", "mDiff"), so it is not a general-purpose partitioner.
 *
 * @author wchen
 */
public class BackwardPartitionerRun7 extends BackwardPartitioner{

    // Scratch queue of job IDs pulled into the partition currently being
    // built by the recursive get*List walks; cleared before each round.
    protected LinkedList subQueue;

    // ID of the most recent wide fan-out job seen (set as a side effect by
    // getBigList/getMediumList; never read inside this class).
    protected String fanOutJob;

    /**
     * @param mDag the concrete workflow whose jobs are being partitioned
     * @param mBag planner bag carrying the catalogs and properties
     */
    public BackwardPartitionerRun7(ADag mDag, PegasusBag mBag)
    {
        super(mDag, mBag);
        subQueue = new LinkedList();
        fanOutJob = "";
    }

    /**
     * Recursively grows {@code jobList} with the not-yet-partitioned
     * (non-"BLACK") ancestors of {@code jobID}, and additionally absorbs
     * children that are already on {@code subQueue}. Depths are recorded in
     * {@code depthMap} (parents at depth+1, children at depth-1).
     *
     * @param jobList  accumulator of job IDs selected for the current partition
     * @param nextList frontier accumulator (only threaded through the
     *                 recursion; this method itself never adds to it)
     * @param jobID    job whose neighbourhood is explored
     * @return 1 if any job reached by the walk has more than 10 children
     *         (a wide fan-out), 0 otherwise
     */
    protected int getBigList(ArrayList jobList, ArrayList nextList, String jobID)
    {
        int reVal = 0;
        int depth = (Integer) depthMap.get(jobID);
        Vector pList = mDag.getParents(jobID);
        Vector cList = mDag.getChildren(jobID);

        // A fan-out wider than 10 marks this walk as "big"; the caller uses
        // the return value to decide how to cut the partition.
        if (cList.size() > 10)
        {
            reVal = 1;
        }

        // Walk upwards: absorb every unpartitioned parent and recurse on it.
        for (Iterator it = pList.iterator(); it.hasNext();)
        {
            String jobname = (String) it.next();
            String logicalID = jobID2jobLogicalID(jobname);
            String color = (String) mJobColor.get(logicalID);

            if (!depthMap.containsKey(jobname))
                depthMap.put(jobname, depth + 1);
            if ((color == null) || (!color.equals("BLACK")))
            {
                if (!jobList.contains(jobname))
                {
                    subQueue.add(jobname);
                    jobList.add(jobname);
                    if (getBigList(jobList, nextList, jobname) == 1)
                        reVal = 1;
                }
            }
        }

        // Remember the most recent moderately-wide fan-out job (side effect
        // only; the field is not read in this class).
        if (cList.size() > 2) fanOutJob = jobID;

        // Walk downwards: only children already queued on subQueue are pulled
        // into the partition; other children are left for a later round.
        // NOTE(review): the original carried a constant-false branch here for
        // BLACK children on a different site ("... && false"); it could never
        // execute and has been removed.
        for (Iterator it = cList.iterator(); it.hasNext();)
        {
            String jobname = (String) it.next();
            String logicalID = jobID2jobLogicalID(jobname);
            String color = (String) mJobColor.get(logicalID);
            if (!depthMap.containsKey(jobname))
                depthMap.put(jobname, depth - 1);
            if ((color == null) || (!color.equals("BLACK")))
            {
                if (!jobList.contains(jobname) && subQueue.contains(jobname))
                {
                    jobList.add(jobname);
                    if (getBigList(jobList, nextList, jobname) == 1)
                        reVal = 1;
                }
            }
        }

        return reVal;
    }

    /**
     * Like {@link #getBigList} but walks only upwards (parents), and cuts at
     * wide fan-outs: a job with more than 10 children is removed from the
     * current partition and deferred to {@code nextList} instead.
     *
     * @param jobList  accumulator of job IDs selected for the current partition
     * @param nextList accumulator of deferred frontier job IDs
     * @param jobID    job whose ancestors are explored
     * @return 0 in the current implementation (the flag is only propagated
     *         through the recursion, never initially set to 1 here)
     */
    protected int getMediumList(ArrayList jobList, ArrayList nextList, String jobID)
    {
        int reVal = 0;
        int depth = (Integer) depthMap.get(jobID);

        Vector cList = mDag.getChildren(jobID);
        if (cList.size() > 10)
        {
            // Defer wide fan-out jobs to the next round rather than growing
            // this partition through them.
            fanOutJob = jobID;
            jobList.remove(jobID);
            if (!nextList.contains(jobID))
                nextList.add(jobID);

            return reVal;
        }

        Vector pList = mDag.getParents(jobID);
        for (Iterator it = pList.iterator(); it.hasNext();)
        {
            String jobname = (String) it.next();
            String logicalID = jobID2jobLogicalID(jobname);
            String color = (String) mJobColor.get(logicalID);

            if (!depthMap.containsKey(jobname))
                depthMap.put(jobname, depth + 1);
            if ((color == null) || (!color.equals("BLACK")))
            {
                if (!jobList.contains(jobname))
                {
                    subQueue.add(jobname);
                    jobList.add(jobname);
                    if (getMediumList(jobList, nextList, jobname) == 1)
                        reVal = 1;
                }
            }
        }

        return reVal;
    }

    /**
     * Walks upwards from {@code jobID}. If the job has more than 10 parents
     * (a wide fan-in) the parents are NOT absorbed: they are coloured GRAY
     * and queued on {@code nextList} so the main loop revisits them later.
     * Otherwise the unpartitioned parents are absorbed recursively, exactly
     * as in {@link #getMediumList}.
     *
     * @param jobList  accumulator of job IDs selected for the current partition
     * @param nextList accumulator of deferred frontier job IDs
     * @param jobID    job whose ancestors are explored
     * @return 0 in the current implementation (flag only propagated)
     */
    protected int getSmallList(ArrayList jobList, ArrayList nextList, String jobID)
    {
        int reVal = 0;
        int depth = (Integer) depthMap.get(jobID);
        Vector pList = mDag.getParents(jobID);
        if (pList.size() > 10)
        {
            // Wide fan-in: cut here and push every parent onto the frontier.
            for (Iterator it = pList.iterator(); it.hasNext();)
            {
                String jobname = (String) it.next();
                edu.isi.pegasus.planner.classes.Job parent = getDagJob(jobname);
                String color;

                if (!mJobColor.containsKey(parent.logicalId))
                {
                    mJobColor.put(parent.logicalId, "WHITE");
                    color = "WHITE";
                }
                else
                    color = (String) mJobColor.get(parent.logicalId);

                if (!color.equals("GRAY") && !color.equals("BLACK"))
                {
                    nextList.add(parent.jobID);
                    mJobColor.put(parent.logicalId, "GRAY");
                }
            }
        }
        else
        {
            for (Iterator it = pList.iterator(); it.hasNext();)
            {
                String jobname = (String) it.next();
                String logicalID = jobID2jobLogicalID(jobname);
                String color = (String) mJobColor.get(logicalID);

                if (!depthMap.containsKey(jobname))
                    depthMap.put(jobname, depth + 1);
                if ((color == null) || (!color.equals("BLACK")))
                {
                    if (!jobList.contains(jobname))
                    {
                        subQueue.add(jobname);
                        jobList.add(jobname);
                        if (getSmallList(jobList, nextList, jobname) == 1)
                            reVal = 1;
                    }
                }
            }
        }

        return reVal;
    }

    /**
     * Partitions the workflow bottom-up. Starting from the rear job, each
     * iteration builds a candidate job list (strategy chosen by Montage job
     * name), checks whether it fits the current site's size limit, and either
     * appends the list to the current sub-workflow DAX or opens a new DAX --
     * on a new site when the limit is exceeded, on the same site when the
     * traversal flagged a cut point. Delegates to {@code super.run()} at the
     * end to finish planning.
     */
    @Override
    public void run()
    {
        getProperties();

        // Bootstrap the first sub-workflow DAX on site 0.
        ParADAG dax = new ParADAG("subworkflow" + mDaxMap.size());
        DAXAbstraction bigDax = new DAXAbstraction(dax);
        Map fList = new HashMap<String, Double>();
        mDaxMap.put(bigDax, fList);
        int siteIndex = 0;
        LinkedList siteList = new LinkedList<DAXAbstraction>();
        bigDax.site = siteIndex;
        siteList.add(bigDax);
        mIndexSiteList.put(siteIndex, siteList);

        translate();
        edu.isi.pegasus.planner.classes.Job rear = getRear();

        mQueue.clear();
        mQueue.addLast(rear);
        int i = 0;

        // Per-site data-size budget: SITESIZE env var scaled by 1e8.
        SiteCatalogEntry se = (SiteCatalogEntry) mSiteIndex.get(siteIndex);
        double limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) * 1e8;
        bigDax.limit = limit;
        ArrayList jobList = new ArrayList();
        ArrayList nextList = new ArrayList();

        /* Known limitation (from the original author): this only works for
         * fan-in and fan-out structures -- a parent with two children where
         * one has already been reached is not handled correctly. */
        while (!mQueue.isEmpty()) {

            edu.isi.pegasus.planner.classes.Job dagJob =
                    (edu.isi.pegasus.planner.classes.Job) mQueue.getLast();

            // Skip jobs already committed to a partition.
            String ccr = (String) mJobColor.get(dagJob.logicalId);
            if ((ccr != null) && ccr.equals("BLACK"))
            {
                mQueue.remove(dagJob);
                continue;
            }
            if (!depthMap.containsKey(dagJob.jobID))
            {
                i = 0;
                depthMap.put(dagJob.jobID, 0);
            }
            else
            {
                i = (Integer) depthMap.get(dagJob.jobID);
            }

            int flag = 0;

            jobList.clear();
            subQueue.clear();
            nextList.clear();
            int reVal = 0;

            // Strategy selection is hard-wired to Montage transformation
            // names, and reVal is then overridden to encode the follow-up:
            //   0 -> nothing special,
            //   1 -> open a fresh DAX on the same site after placement,
            //   2 -> open a fresh DAX both before and after placement.
            if (dagJob.jobID.contains("mBgModel"))
            {
                reVal = getSmallList(jobList, nextList, dagJob.jobID);
                if (!jobList.contains(dagJob.jobID))
                    jobList.add(dagJob.jobID);
                reVal = 2;
            }
            else if (dagJob.jobID.contains("mShrink"))
            {
                reVal = getMediumList(jobList, nextList, dagJob.jobID);
                if (!jobList.contains(dagJob.jobID))
                    jobList.add(dagJob.jobID);
            }
            else if (dagJob.jobID.contains("mDiff"))
            {
                reVal = getMediumList(jobList, nextList, dagJob.jobID);
                if (!jobList.contains(dagJob.jobID))
                    jobList.add(dagJob.jobID);
                reVal = 0;
            }
            else
            {
                reVal = getSmallList(jobList, nextList, dagJob.jobID);
                if (!jobList.contains(dagJob.jobID))
                    jobList.add(dagJob.jobID);
                reVal = 1;
            }

            // Would the candidate list still fit on the current site?
            double sizeA = getSizeMap(siteIndex);
            double sizeB = getSizeMap(jobList, siteIndex);
            double sizeC = sizeA + sizeB;
            if (sizeC <= limit)
            {
                flag = 1;
            }

            if (flag == 1)
            {
                // Fits: just account for the added data on this site.
                setSizeMap(siteIndex, sizeC);
            }
            else
            {
                // Does not fit: start a brand-new DAX on the next site.
                System.out.println("the breakpoint is "+dagJob.jobID + "sizeA:" + sizeA + " sizeB:" +sizeB);
                dax = new ParADAG("subworkflow" + mDaxMap.size());
                bigDax = new DAXAbstraction(dax);
                fList = new HashMap<String, Double>();
                mDaxMap.put(bigDax, fList);
                // NOTE(review): the limit is re-read for the OLD siteIndex
                // before the increment below -- preserved from the original;
                // confirm whether the new site's SITESIZE was intended.
                se = (SiteCatalogEntry) mSiteIndex.get(siteIndex);
                limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) * 1e8;
                bigDax.limit = limit;
                siteList = new LinkedList<DAXAbstraction>();
                siteList.add(bigDax);
                siteIndex++;
                bigDax.site = siteIndex;
                double sizeD = getSizeMap(jobList);
                setSizeMap(siteIndex, sizeD);
                mIndexSiteList.put(siteIndex, siteList);
            }

            // NOTE(review): for reVal==2 a fresh DAX is opened here AND again
            // after the frontier is queued, so mBgModel rounds produce two
            // consecutive new DAXes on the same site -- original behaviour.
            if (reVal == 2 && flag != 0)
            {
                dax = new ParADAG("subworkflow" + mDaxMap.size());
                bigDax = new DAXAbstraction(dax);
                fList = new HashMap<String, Double>();
                mDaxMap.put(bigDax, fList);
                se = (SiteCatalogEntry) mSiteIndex.get(siteIndex);
                limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) * 1e8;
                bigDax.limit = limit;
                bigDax.site = siteIndex;
                siteList = (LinkedList) mIndexSiteList.get(siteIndex);
                siteList.add(bigDax);

                setSizeMap(siteIndex, sizeC);
            }

            // Commit the job list to the current DAX and colour it BLACK.
            for (Iterator it = jobList.iterator(); it.hasNext();)
            {
                edu.isi.pegasus.planner.classes.Job job = getDagJob((String) it.next());
                updateSizeMap(bigDax, job);
                addDAGJob(job, dax);
                mJob2DAX.put(job.getLogicalID(), dax);
                mJobColor.put(job.logicalId, "BLACK");
            }

            // Queue the deferred frontier for later iterations.
            for (Iterator it = nextList.iterator(); it.hasNext();)
            {
                edu.isi.pegasus.planner.classes.Job job = getDagJob((String) it.next());
                String color;
                if (!mJobColor.containsKey(job.logicalId))
                {
                    mJobColor.put(job.logicalId, "WHITE");
                    color = "WHITE";
                }
                else
                    color = (String) mJobColor.get(job.logicalId);

                if (!color.equals("BLACK") && !mQueue.contains(job))
                {
                    mQueue.addFirst(job);
                    mJobColor.put(job.logicalId, "GRAY");
                    depthMap.put(job.jobID, i + 1);
                }
            }

            // Open a fresh DAX on the same site after a cut-flagged round.
            if ((reVal == 1 || reVal == 2) && flag != 0)
            {
                dax = new ParADAG("subworkflow" + mDaxMap.size());
                bigDax = new DAXAbstraction(dax);
                fList = new HashMap<String, Double>();
                mDaxMap.put(bigDax, fList);
                se = (SiteCatalogEntry) mSiteIndex.get(siteIndex);
                limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) * 1e8;
                bigDax.limit = limit;
                bigDax.site = siteIndex;
                siteList = (LinkedList) mIndexSiteList.get(siteIndex);
                siteList.add(bigDax);
            }

            mJobColor.put(dagJob.logicalId, "BLACK");
            mQueue.remove(dagJob);
        }

        System.out.println("Jobs are all done");
        super.run();
    }

}
