/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package edu.isi.pegasus.planner.subworkflow.partitioner.implementation;

import edu.isi.pegasus.planner.subworkflow.partitioner.BackwardPartitioner;
import edu.isi.pegasus.planner.subworkflow.partitioner.DAXAbstraction;
import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.dax.ADAG;
import edu.isi.pegasus.planner.dax.ParADAG;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;

/**
 *
 * @author wchen
 */
/**
 * Backward (rear-to-front) workflow partitioner, variant "run 3".
 *
 * <p>Performs a reverse breadth-first traversal starting at the workflow's
 * rear job, packing jobs into sub-workflow DAX partitions. A new partition is
 * opened either when a job has a high fan-in (&gt; 10 parents; same site kept)
 * or when the accumulated data size would exceed the current site's capacity
 * (SITESIZE environment variable from the site catalog, scaled by 1e8; next
 * site index used). Jobs are colored WHITE/GRAY/BLACK to drive the traversal.
 *
 * @author wchen
 */
public class BackwardPartitionerRun3 extends BackwardPartitioner {

    /**
     * Creates a partitioner over the given workflow.
     *
     * @param mDag the abstract workflow DAG to partition
     * @param mBag the Pegasus initialization bag carrying planner state
     */
    public BackwardPartitionerRun3(ADag mDag, PegasusBag mBag) {
        super(mDag, mBag);
    }

    /**
     * Runs the backward partitioning pass. Mutates the inherited maps
     * ({@code mDaxMap}, {@code mIndexSiteList}, {@code mJob2DAX},
     * {@code mJobColor}, {@code depthMap}) and prints progress to stdout.
     */
    @Override
    public void run() {
        getProperties();

        // Open the first partition and pin it to site 0.
        ParADAG dax = new ParADAG("subworkflow" + mDaxMap.size());
        DAXAbstraction bigDax = new DAXAbstraction(dax);
        Map<String, Double> fList = new HashMap<String, Double>();
        mDaxMap.put(bigDax, fList);

        int siteIndex = 0;
        LinkedList<DAXAbstraction> siteList = new LinkedList<DAXAbstraction>();
        bigDax.site = siteIndex;
        siteList.add(bigDax);
        mIndexSiteList.put(siteIndex, siteList);
        translate();
        edu.isi.pegasus.planner.classes.Job rear = getRear();

        // Reverse BFS seeded with the rear job.
        mQueue.clear();
        mQueue.addLast(rear);
        int i = 0;
        int iter = 0;
        int maxIter = mDag.getNoOfJobs();
        // Progress-tick interval. Clamp to 1: with fewer than 40 jobs the
        // original integer division yielded 0 and "iter % dex" threw
        // ArithmeticException on the first loop iteration.
        int dex = Math.max(1, maxIter / 40);
        System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");

        while (!mQueue.isEmpty()) {

            iter++;
            if (iter % dex == 0) {
                System.out.print("<");
            }

            edu.isi.pegasus.planner.classes.Job dagJob =
                    (edu.isi.pegasus.planner.classes.Job) mQueue.getLast();

            // Already fully processed on an earlier visit: drop and move on.
            String ccr = (String) mJobColor.get(dagJob.logicalId);
            if ((ccr != null) && ccr.equals("BLACK")) {
                mQueue.remove(dagJob);
                continue;
            }

            // Depth bookkeeping: a first visit starts at depth 0; parents
            // enqueued below are recorded at depth i + 1.
            if (!depthMap.containsKey(dagJob.jobID)) {
                i = 0;
                depthMap.put(dagJob.jobID, 0);
            } else {
                i = (Integer) depthMap.get(dagJob.jobID);
            }
            ArrayList<String> jobList = new ArrayList<String>();

            // Refresh the current site's capacity limit from the site catalog.
            SiteCatalogEntry se = (SiteCatalogEntry) mSiteIndex.get(siteIndex);
            double limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) * 1e8;
            bigDax.limit = limit;
            bigDax.site = siteIndex;

            // A join job (>= 2 children) must wait until all of its children
            // are BLACK; otherwise re-whiten it so it is revisited later.
            if (mDag.getChildren(dagJob.jobID).size() >= 2) {
                if (!checkChildColor(dagJob.jobID)) {
                    mQueue.remove(dagJob);
                    mJobColor.put(dagJob.logicalId, "WHITE");
                    continue;
                }
            }

            if (mDag.getParents(dagJob.jobID).size() > 10) {
                // High fan-in: cut a new partition here. Unlike run5/run6 the
                // site index is NOT advanced — the new partition shares the
                // same site, so its DAX is appended to that site's list.
                mJobColor.put(dagJob.logicalId, "BLACK");

                dax = new ParADAG("subworkflow" + mDaxMap.size());
                bigDax = new DAXAbstraction(dax);
                fList = new HashMap<String, Double>();
                mDaxMap.put(bigDax, fList);
                se = (SiteCatalogEntry) mSiteIndex.get(siteIndex);
                limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) * 1e8;
                bigDax.limit = limit;
                bigDax.site = siteIndex;
                siteList = (LinkedList) mIndexSiteList.get(siteIndex);
                siteList.add(bigDax);
                jobList.clear();
                jobList.add(dagJob.jobID);
                // NOTE(review): the original author flagged this size
                // accounting as possibly wrong but unavoidable — preserved
                // as-is; verify against the size-map semantics.
                double sizeD = getSizeMap(jobList, siteIndex);
                setSizeMap(siteIndex, sizeD + getSizeMap(siteIndex));
            } else {
                jobList.clear();
                jobList.add(dagJob.jobID);
                double sizeA = getSizeMap(siteIndex);          // already placed on site
                double sizeB = getSizeMap(jobList, siteIndex); // this job's footprint
                double sizeC = sizeA + sizeB;
                if (sizeC > limit) {
                    // Site is full: open a fresh partition on the next site.
                    System.out.println("the breakpoint is " + dagJob.jobID + "sizeA:" + sizeA + " sizeB:" + sizeB);

                    dax = new ParADAG("subworkflow" + mDaxMap.size());
                    bigDax = new DAXAbstraction(dax);
                    fList = new HashMap<String, Double>();
                    mDaxMap.put(bigDax, fList);
                    siteIndex++;
                    bigDax.site = siteIndex;
                    siteList = new LinkedList<DAXAbstraction>();
                    siteList.add(bigDax);
                    mIndexSiteList.put(siteIndex, siteList);

                    double sizeD = getSizeMap(jobList);
                    setSizeMap(siteIndex, sizeD);
                } else {
                    setSizeMap(siteIndex, sizeC);
                }
            }

            // Commit the job to the current partition.
            updateSizeMap(bigDax, dagJob);
            addDAGJob(dagJob, dax);
            mJob2DAX.put(dagJob.getLogicalID(), dax);

            // Enqueue every not-yet-visited parent (reverse BFS expansion).
            for (Iterator it = mDag.getParents(dagJob.jobID).iterator(); it.hasNext();) {
                String jobname = (String) it.next();
                edu.isi.pegasus.planner.classes.Job parent = getDagJob(jobname);

                String color = "";
                if (!mJobColor.containsKey(parent.logicalId)) {
                    mJobColor.put(parent.logicalId, "WHITE");
                    color = "WHITE";
                } else {
                    color = (String) mJobColor.get(parent.logicalId);
                }

                if (!color.equals("GRAY") && !color.equals("BLACK")) {
                    mQueue.addLast(parent);
                    mJobColor.put(parent.logicalId, "GRAY");
                    depthMap.put(parent.jobID, i + 1);
                }
            }
            mJobColor.put(dagJob.logicalId, "BLACK");
            mQueue.remove(dagJob);
        }

        System.out.println("\nAll jobs are done\n");
    }
}
