/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package edu.isi.pegasus.planner.subworkflow.partitioner.implementation;

import edu.isi.pegasus.planner.subworkflow.partitioner.BackwardPartitioner;
import edu.isi.pegasus.planner.subworkflow.partitioner.DAXAbstraction;
import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.dax.ADAG;
import edu.isi.pegasus.planner.dax.ParADAG;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Vector;

/**
 *
 * @author wchen
 */
public class BackwardPartitionerRun8 extends BackwardPartitioner{

    protected LinkedList subQueue;
    protected String fanOutJob;

    /**
     * Creates a partitioner over the given abstract workflow.
     *
     * @param mDag the abstract workflow (DAG) to partition
     * @param mBag bag of Pegasus initialization objects
     */
    public BackwardPartitionerRun8(ADag mDag, PegasusBag mBag) {
        super(mDag, mBag);
        // start with no fan-out job recorded and an empty traversal queue
        fanOutJob = "";
        subQueue = new LinkedList();
    }

    /**
     * Recursively collects the not-yet-assigned (non-BLACK) neighborhood of
     * {@code jobID} into {@code jobList}, walking both parents and children,
     * with special handling for fan-out jobs (more than 10 children).
     * Jobs deferred to a later partitioning pass are collected in
     * {@code nextList}.
     *
     * @param jobList    accumulator for jobs to place in the current sub-DAX
     * @param nextList   accumulator for frontier jobs deferred to a later pass
     * @param jobID      the job whose neighborhood is explored
     * @param isExcluded when true, the fan-out special casing is skipped for
     *                   this job (but not for jobs reached recursively)
     * @return 1 if a partition boundary was detected during the traversal,
     *         0 otherwise
     */
    protected int getBigList(ArrayList jobList, ArrayList nextList, String jobID,boolean isExcluded)
    {
        int reVal = 0;
        boolean skip = false;
        // NOTE(review): assumes jobID is already present in depthMap — would
        // NPE on unboxing otherwise; confirm callers always seed the depth.
        int  depth = (Integer)depthMap.get(jobID);
        Vector pList = mDag.getParents(jobID);
        Vector cList = mDag.getChildren(jobID);
        // Fan-out job (> 10 children) gets special treatment unless excluded.
        if(cList.size() >  10 && !isExcluded)
        {
            // Decide based on whether its children were already split across
            // different sub-DAXes.
            int check = checkChildrenSame(jobID);
            if(check==1)
            {
                /**
                for(Iterator it = pList.iterator();it.hasNext();)
                {
                    String jobname = (String )it.next();
                    if(!nextList.contains(jobname))
                         nextList.add(jobname);
                }
                if(!jobList.contains(jobID))
                {
                    //jobList.add(jobID);
                }
                 * **/
                // Children span several DAXes: defer this fan-out job itself
                // and drop it from the current candidate list.
                if(!nextList.contains(jobID))
                    nextList.add(jobID);
                if(jobList.contains(jobID))
                    jobList.remove(jobID);
                reVal = 1;
                skip = true;
                return reVal;
            }else if(check==0)
            {
                // Children share one DAX (or none assigned yet): keep the job
                // but stop climbing past it — its parents become the frontier.
                for(Iterator it = pList.iterator();it.hasNext();)
                {
                    String jobname = (String )it.next();
                    if(!nextList.contains(jobname))
                         nextList.add(jobname);
                }
                if(!jobList.contains(jobID))
                {
                    jobList.add(jobID);
                }
                skip =true;
                //reVal = 1;
                //return reVal;
            }
        }
        
        // Climb to unassigned parents unless the fan-out case above already
        // pushed them into nextList.
        if(!skip)
        for(Iterator it = pList.iterator();it.hasNext();)
        {
            String jobname = (String )it.next();
            //edu.isi.pegasus.planner.classes.Job parent = getDagJob(jobname);
            String logicalID = jobID2jobLogicalID(jobname);
            String color = (String)mJobColor.get(logicalID);

            if(!depthMap.containsKey(jobname))
                depthMap.put(jobname, depth+1);
            if((color==null)|| (!color.equals("BLACK")))
            {
                  if(!jobList.contains(jobname))
                    {
                        subQueue.add(jobname);
                        jobList.add(jobname);
                        if(getBigList(jobList, nextList, jobname, false)==1)
                            reVal = 1;

                    }
            }



        }

    

        boolean flag = false;

        // Descend to unassigned children as well. Note: children are visited
        // even when skip is true (fan-out case with check==0).
        for(Iterator it = cList.iterator();it.hasNext();)
        {
            String jobname = (String )it.next();
            //edu.isi.pegasus.planner.classes.Job child = getDagJob(jobname);
            String logicalID = jobID2jobLogicalID(jobname);
            String color = (String)mJobColor.get(logicalID);
            if(!depthMap.containsKey(jobname))
                depthMap.put(jobname, depth-1);
            int i = (Integer)depthMap.get(jobname);
            if((color==null)||( !color.equals("BLACK")))
            {
                if(!jobList.contains(jobname))
                {
                    // NOTE(review): the guard is hard-wired to true; the
                    // subQueue-membership test is disabled, so the else/flag
                    // branch below is dead code.
                    if(true )//subQueue.contains(jobname))
                    {
                        jobList.add(jobname);
                        if(getBigList(jobList, nextList, jobname,false)==1)
                            reVal= 1;
                    }
                    else
                    {
                        flag =true;
                    }


                }
            }
            else if(color.equals("BLACK") && false)//this should be if that one is different to the current site index
            {
                // NOTE(review): dead branch (condition ends in "&& false");
                // kept as in the original — apparently an unfinished feature
                // for revisiting jobs assigned to a different site.
                if(!jobList.contains(jobname))
                {
                    if( subQueue.contains(jobname))
                    {
                        jobList.add(jobname);
                        if(getBigList(jobList, nextList, jobname,false)==1)
                            reVal= 1;
                    }
                    else
                    {
                        flag =true;
                    }


                }

            }

        }


        return reVal;
    }
    /**
     * Recursively absorbs the unassigned ancestors of {@code jobID} into
     * {@code jobList}, stopping at (and deferring) any fan-out job — a job
     * with more than 10 children. The encountered fan-out job is remembered
     * in the {@code fanOutJob} field and queued on {@code nextList}.
     *
     * @param jobList  accumulator for jobs to place in the current sub-DAX
     * @param nextList accumulator for jobs deferred to a later pass
     * @param jobID    the job whose ancestry is explored
     * @return 1 if any recursive call reported a boundary, 0 otherwise
     */
    protected int getMediumList(ArrayList jobList, ArrayList nextList, String jobID)
    {
        int result = 0;
        int myDepth = (Integer) depthMap.get(jobID);

        // A node with many children is the fan-out job: record it, pull it
        // out of the current list, and defer it to the next pass.
        Vector children = mDag.getChildren(jobID);
        if (children.size() > 10) {
            fanOutJob = jobID;
            jobList.remove(jobID);
            if (!nextList.contains(jobID)) {
                nextList.add(jobID);
            }
            return result;
        }

        // Otherwise climb to every parent that is not yet finalized (BLACK).
        for (Iterator it = mDag.getParents(jobID).iterator(); it.hasNext();) {
            String parentID = (String) it.next();
            String color = (String) mJobColor.get(jobID2jobLogicalID(parentID));

            if (!depthMap.containsKey(parentID)) {
                depthMap.put(parentID, myDepth + 1);
            }
            if (color != null && color.equals("BLACK")) {
                continue;   // already committed to a sub-DAX
            }
            if (jobList.contains(parentID)) {
                continue;   // already collected on this pass
            }
            subQueue.add(parentID);
            jobList.add(parentID);
            if (getMediumList(jobList, nextList, parentID) == 1) {
                result = 1;
            }
        }
        return result;
    }
    /**
     * Collects a conservative ("small") set of ancestors of {@code jobID}
     * into {@code jobList}: recursion climbs upwards only while a job has 10
     * or fewer parents; at a fan-in boundary (more than 10 parents) every
     * unassigned parent is deferred to {@code nextList} instead.
     *
     * @param jobList  accumulator for jobs to place in the current sub-DAX
     * @param nextList accumulator for jobs deferred to a later pass
     * @param jobID    the job whose ancestry is explored
     * @return 1 if any recursive call reported a boundary, 0 otherwise
     */
    protected int getSmallList(ArrayList jobList, ArrayList nextList, String jobID)
    {
        int reVal = 0;
        // NOTE(review): assumes jobID is already in depthMap — NPE otherwise.
        int depth = (Integer) depthMap.get(jobID);
        Vector pList = mDag.getParents(jobID);

        if (pList.size() > 10) {
            // Fan-in boundary: defer every unassigned parent to a later pass.
            for (Iterator it = pList.iterator(); it.hasNext();) {
                String jobname = (String) it.next();
                edu.isi.pegasus.planner.classes.Job parent = getDagJob(jobname);
                String color;
                if (!mJobColor.containsKey(parent.logicalId)) {
                    mJobColor.put(parent.logicalId, "WHITE");
                    color = "WHITE";
                } else {
                    color = (String) mJobColor.get(parent.logicalId);
                }
                // BUGFIX: the original tested !color.equals("BLACK") twice
                // ("!BLACK && !BLACK"); a single check is sufficient.
                // NOTE(review): there is no contains() guard here, so
                // duplicates can land in nextList — kept to preserve behavior.
                if (!color.equals("BLACK")) {
                    nextList.add(parent.jobID);
                    //I think you should set it here
                    //mJobColor.put(parent.logicalId, "GRAY");
                    //depthMap.put(parent.jobID, i+1);
                }
            }
        } else {
            // Few parents: absorb the unassigned ones and recurse upwards.
            for (Iterator it = pList.iterator(); it.hasNext();) {
                String jobname = (String) it.next();
                String logicalID = jobID2jobLogicalID(jobname);
                String color = (String) mJobColor.get(logicalID);

                if (!depthMap.containsKey(jobname)) {
                    depthMap.put(jobname, depth + 1);
                }
                if ((color == null) || (!color.equals("BLACK"))) {
                    if (!jobList.contains(jobname)) {
                        subQueue.add(jobname);
                        jobList.add(jobname);
                        if (getSmallList(jobList, nextList, jobname) == 1) {
                            reVal = 1;
                        }
                    }
                }
            }
        }

        return reVal;
    }
 
    /**
     * Tests whether the children of {@code jobID} have been assigned to more
     * than one sub-DAX.
     *
     * @param jobID the job whose children are examined
     * @return 1 if at least one assigned child maps to a different DAX than
     *         the first child, 0 otherwise (including when there are no
     *         children)
     */
    private int checkChildrenSame(String jobID)
    {
        Vector children = mDag.getChildren(jobID);
        if (children.size() <= 0) {
            return 0;
        }
        // Reference assignment: the DAX of the first child (may be null when
        // the first child has not been assigned yet).
        String firstChild = (String) children.get(0);
        ADAG referenceDax = (ADAG) mJob2DAX.get(jobID2jobLogicalID(firstChild));

        for (Iterator it = children.iterator(); it.hasNext();) {
            String child = (String) it.next();
            ADAG childDax = (ADAG) mJob2DAX.get(jobID2jobLogicalID(child));
            if (childDax == null) {
                continue;   // unassigned children are ignored
            }
            // Identity (==) comparison is intentional: each partition is a
            // distinct ADAG instance.
            if (childDax != referenceDax) {
                return 1;
            }
        }
        return 0;
    }

    /**
     * Heuristically determines whether the workflow region above the children
     * of {@code jobID} contains one or multiple distinct fan-in jobs (jobs
     * with more than 10 children). Instead of checking every parent, four
     * evenly spaced parents of the first child are sampled and each is walked
     * upward until a fan-in job is found.
     *
     * @param jobID a job whose first child is a fan-out/fan-in candidate
     * @return 1 if the samples reach different fan-in jobs (multiple, e.g.
     *         CyberShake-style), 2 if they all reach the same one (single,
     *         e.g. Montage-style), 0 when the structure cannot be determined
     */
    private int checkMultipleFanOutJob(String jobID)
    {
        Vector vec = mDag.getChildren(jobID);
        if(vec.size()<=0)return 0;
        String fanOutJob = (String)vec.get(0);
        Vector pVec = mDag.getParents(fanOutJob);
        //do sampling rather check all of them
        int size = pVec.size();
        // BUGFIX: guard against a parentless fan-out job; previously
        // pVec.get(0) threw IndexOutOfBoundsException when size == 0,
        // while every other failure path returns 0.
        if(size <= 0)
        {
            return 0;
        }
        String fanInJob = "";
        for(int i = 0; i < 4; i ++)
        {
            // Sample parents at 0, 1/4, 2/4 and 3/4 of the parent list.
            String pJob = (String)pVec.get(i * size /4);
            while(true)
            {
                Vector ppVec = mDag.getParents(pJob);
                if(ppVec.size() <=0)
                {
                    // Reached a root without meeting a fan-in job.
                    System.out.println("Error here");
                    return 0;
                }
                // Climb via the first parent only (sampling heuristic).
                pJob = (String)ppVec.get(0);
                if(mDag.getChildren(pJob).size()>10)
                {
                    if(fanInJob.equals("") ||fanInJob.equals(pJob))
                    {
                        fanInJob = pJob;
                        break;
                    }
                    else
                    {
                        // A second, different fan-in job was reached.
                        return 1;
                    }
                }


            }
        }
        return 2;
    }
          
    /**
     * Entry point of the partitioner: traverses the workflow backwards from
     * the rear (sink) job and greedily packs jobs into sub-DAXes subject to a
     * per-site storage limit read from the site's SITESIZE environment
     * variable (scaled by 1e8).
     * NOTE(review): as the in-line comment below states, the traversal only
     * works for fan-in/fan-out structured workflows; behavior on arbitrary
     * DAGs is not guaranteed.
     */
    @Override
    public void run()
    {
        getProperties();
        // Seed the first sub-DAX abstraction and register it under site 0.
        ParADAG dax = new ParADAG("subworkflow" + mDaxMap.size());
        DAXAbstraction bigDax = new DAXAbstraction(dax);
        Map fList= new HashMap<String, Double>();
        mDaxMap.put(bigDax, fList);
        int siteIndex = 0;
        LinkedList siteList = new LinkedList<DAXAbstraction>();
        bigDax.site = siteIndex;
        int hasMultipleFanOutJob = 0;
        siteList.add(bigDax);
        mIndexSiteList.put(siteIndex, siteList);
        //long startTime = System.nanoTime();
        translate();
        //translateParallel();
        //long endTime = System.nanoTime();
        //System.out.println("Duration:" + (endTime - startTime));
        // Backward traversal starts from the rear job of the DAG.
        edu.isi.pegasus.planner.classes.Job rear = getRear();
        
        
        mQueue.clear();
        mQueue.addLast( rear );

        int i = 0;
        ArrayList tmpJobList = null;
        ArrayList tmpNextList = null;
        String tmpJobID = "";
        // Storage limit for the current site.
        SiteCatalogEntry se = (SiteCatalogEntry)mSiteIndex.get(siteIndex);
        double limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) *1e8;
        bigDax.limit = limit;
        ArrayList jobList = new ArrayList();
        ArrayList nextList = new ArrayList();
        while( !mQueue.isEmpty() ){

        /**There is a problem, it only works for fan-in and fan-out struct.
         * Pay attention to a parent has two children and one has reached
         **/

            edu.isi.pegasus.planner.classes.Job dagJob =
                    (edu.isi.pegasus.planner.classes.Job)mQueue.getLast();
 //            String runtime = (String)dagJob.vdsNS.get(Pegasus.RUNTIME_KEY);

            // Jobs already committed to a sub-DAX (BLACK) are skipped.
            String ccr = (String)mJobColor.get(dagJob.logicalId);
            if((ccr != null) && ccr.equals("BLACK"))
            {
                mQueue.remove(dagJob);
                continue;
            }
            if(!depthMap.containsKey(dagJob.jobID))
            {
                i = 0;
                depthMap.put(dagJob.jobID, 0);
            }
            else
            {
                i = (Integer)depthMap.get(dagJob.jobID);
            }
                int flag = 0;
                int jobType = 0;
                double sizeA = 0.0, sizeB = 0.0, sizeC = 0.0;
                int reVal = 0;int reVal2 = 0;
                // Classify the job by looking at its first child: a child
                // with more than 10 parents marks a fan-in job (type 1),
                // otherwise a fan-out job (type 2); no children => normal
                // job (type 3).
                if(mDag.getChildren(dagJob.jobID).size() >= 1)
                {
                    String childJob = (String)mDag.getChildren(dagJob.jobID).get(0);
                    Vector pList = mDag.getParents(childJob);
                    if(pList.size()> 10)
                    {
                        jobType = 1;//fan in job
                    }
                    else
                    {
                        jobType = 2;//fan out job
                    }
                }else
                {
                    jobType = 3;//normal job
                }
                subQueue.clear();
                jobList.clear();
                nextList.clear();
//                if(dagJob.jobID.contains("ID3_208_6"))
//                    System.out.println();
                if(jobType == 1)
                {

                    // Lazily detect the workflow shape once.
                    if(hasMultipleFanOutJob==0)
                    {
                        hasMultipleFanOutJob = checkMultipleFanOutJob(dagJob.jobID);
                    }
                    if(hasMultipleFanOutJob==1)//multiple job:cybershake
                    {
                        reVal = getBigList(jobList,nextList,dagJob.jobID,true);
                        if(!jobList.contains(dagJob.jobID))jobList.add(dagJob.jobID);

                        // sizeA: data already on the site; sizeB: data the
                        // candidate job list would add.
                        sizeA = getSizeMap(siteIndex);
                        sizeB = getSizeMap(jobList, siteIndex);
                        sizeC = sizeA + sizeB;
                        if(sizeC < limit)
                        {
                            flag = 0; reVal = 0;
                        }
                        else
                        {
                            flag = 1; reVal = 1; 
                        }

                    }

                    else if(hasMultipleFanOutJob == 2)//single job:montage
                    {
                        reVal = getMediumList(jobList, nextList, dagJob.jobID);
                        if(!jobList.contains(dagJob.jobID))
                            jobList.add(dagJob.jobID);
                        
                        sizeA = getSizeMap(siteIndex);
                        sizeB = getSizeMap(jobList, siteIndex);
                        sizeC = sizeA + sizeB;
                        if(sizeC < limit)
                        {
                            //no need to add new dax
                            flag = 0;
                            //no need to add new dax
                            reVal = 0;
                        }
                        else
                        {
                            flag = 1;
                            //after that
                            reVal = 1;
                        }
                    }


                   
                }
                else if(jobType == 2)
                {

                    // Reuse the lists computed on a previous visit of the
                    // same fan-out job; otherwise compute them afresh.
                    // NOTE(review): this aliases jobList/nextList to the tmp
                    // lists; the later clear() calls then act on those.
                    if(dagJob.jobID.equals(tmpJobID))
                    {
                        jobList = tmpJobList;
                        nextList = tmpNextList;
                    }
                    else
                        reVal = getBigList(jobList, nextList, dagJob.jobID,true);
                    if(!jobList.contains(dagJob.jobID))
                        jobList.add(dagJob.jobID);
                    sizeA = getSizeMap(siteIndex);
                    sizeB = getSizeMap(jobList, siteIndex);
                    sizeC = sizeA + sizeB;
                    if(sizeC < limit)
                    {
                        //no need to add new dax
                        flag = 0;
                        //no need to add new dax
                        reVal = 1;//checkChildrenSame(dagJob.jobID);
                    }
                    else
                    {
                        // Big list does not fit: remember it, then retry with
                        // the conservative one-hop "small" list.
                        tmpJobID = dagJob.jobID;
                        tmpNextList = new ArrayList(nextList);
                        tmpJobList = new ArrayList(jobList);
                        
                        subQueue.clear();
                        jobList.clear();
                        nextList.clear();
                        reVal = getSmallList(jobList, nextList, dagJob.jobID);
                        if(!jobList.contains(dagJob.jobID))
                            jobList.add(dagJob.jobID);
                        sizeB = getSizeMap(jobList, siteIndex);
                        sizeC = sizeB + sizeA;
                        if(sizeC < limit)
                        {
                            flag = 0;
                            reVal = checkChildrenSame(dagJob.jobID);
                            reVal2 = 1;//because we already check the big list that it's not possible to add all of them
                            //which is very important to know
                            //Here we have a huge assumption that the whole stuff is not possible to be put into one workflow and it has less than 2 layers.
                            //otherwise we should roll back and compare it with siteIndex+1

                        }
                        else
                        {
                            // Even the small list does not fit: open a brand
                            // new sub-DAX on the next site and retry this job.
                            System.out.println("You should never been here\n");
                            flag = 1;
                            reVal = 2;

                            dax = new ParADAG("subworkflow" + mDaxMap.size());

                            /* start to write dax file*/
                            //checkDaxWriteStart(bigDax);

                            bigDax = new DAXAbstraction(dax);
                            fList = new HashMap<String, Double>();
                            mDaxMap.put(bigDax, fList);
                            se = (SiteCatalogEntry)mSiteIndex.get(siteIndex);
                            limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) *1e8;
                            bigDax.limit = limit;

                            siteList = new LinkedList<DAXAbstraction>();
                            siteList.add(bigDax);
                            siteIndex ++;
                            bigDax.site = siteIndex;
                            double sizeD = getSizeMap(jobList);
                            setSizeMap(siteIndex, 0);
                            mIndexSiteList.put(siteIndex, siteList);




                            continue;
                        }
                    }
                }
                else if(jobType ==3)//dont need to specify the tmpjobid
                {
                    // NOTE(review): the if(false) branch below is disabled
                    // (CyberShake-specific path); the else forces sizeC over
                    // the limit so the small-list path always runs.
                    if(false)//for cybershake
                    {
                        if(dagJob.jobID.equals(tmpJobID))
                        {
                            jobList = tmpJobList;
                            nextList = tmpNextList;
                        }
                        else
                            reVal = getBigList(jobList, nextList, dagJob.jobID,true);
                        if(!jobList.contains(dagJob.jobID))
                            jobList.add(dagJob.jobID);
                        sizeA = getSizeMap(siteIndex);
                        sizeB = getSizeMap(jobList, siteIndex);
                        sizeC = sizeA + sizeB;
                    }
                    else
                        sizeC = limit + 1;
                    if(sizeC < limit)
                    {
                        //no need to add new dax
                        flag = 0;
                        //no need to add new dax
                        reVal = 1;//checkChildrenSame(dagJob.jobID);
                    }
                    else
                    {
                        tmpJobID = dagJob.jobID;
                        tmpNextList = new ArrayList(nextList);
                        tmpJobList = new ArrayList(jobList);

                        subQueue.clear();
                        jobList.clear();
                        nextList.clear();
                        reVal = getSmallList(jobList, nextList, dagJob.jobID);
                        if(!jobList.contains(dagJob.jobID))
                            jobList.add(dagJob.jobID);
                        sizeB = getSizeMap(jobList, siteIndex);
                        sizeC = sizeB + sizeA;
                        if(sizeC < limit)
                        {
                            flag = 0;
                            reVal = 0;
                            reVal2 = 1 ;//because we already check the big list that it's not possible to add all of them
                            //which is very important to know
                            //Here we have a huge assumption that the whole stuff is not possible to be put into one workflow and it has less than 2 layers.
                            //otherwise we should roll back and compare it with siteIndex+1

                        }
                        else
                        {
                            System.out.println("You should never been here\n");
                            flag = 1;
                            reVal = 2;

                            dax = new ParADAG("subworkflow" + mDaxMap.size());

                           /* start to write dax file*/
                            //checkDaxWriteStart(bigDax);

                            bigDax = new DAXAbstraction(dax);
                            fList = new HashMap<String, Double>();
                            mDaxMap.put(bigDax, fList);
                            se = (SiteCatalogEntry)mSiteIndex.get(siteIndex);
                            limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) *1e8;
                            bigDax.limit = limit;

                            siteList = new LinkedList<DAXAbstraction>();
                            siteList.add(bigDax);
                            siteIndex ++;
                            bigDax.site = siteIndex;
                            double sizeD = getSizeMap(jobList);
                            setSizeMap(siteIndex, 0);
                            mIndexSiteList.put(siteIndex, siteList);




                            continue;
                        }
                    }
                }

                // flag==0: the job list fits on the current site.
                if (flag == 0  )
                {
                    //put the whole job list into dax
                    setSizeMap(siteIndex, sizeC);
                }
                else if(reVal == 1 && flag == 1)
                {
                    // Over the limit at a boundary: open a new sub-DAX on a
                    // NEW site and move the candidate list's size there.
                    //build new dax
                    System.out.println("the breakpoint is "+dagJob.jobID + "sizeA:" + sizeA + " sizeB:" +sizeB);
                    dax = new ParADAG("subworkflow" + mDaxMap.size());

                    /* start to write dax file*/
                    //checkDaxWriteStart(bigDax);
                    

                    bigDax = new DAXAbstraction(dax);
                    
                    
                    fList = new HashMap<String, Double>();
                    mDaxMap.put(bigDax, fList);
                    se = (SiteCatalogEntry)mSiteIndex.get(siteIndex);
                    limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) *1e8;
                    bigDax.limit = limit;

                    siteList = new LinkedList<DAXAbstraction>();
                    siteList.add(bigDax);
                    siteIndex ++;
                    bigDax.site = siteIndex;
                    double sizeD = getSizeMap(jobList);
                    setSizeMap(siteIndex, sizeD);
                    mIndexSiteList.put(siteIndex, siteList);
                }

                // Boundary detected but it still fits: start a fresh sub-DAX
                // on the SAME site before committing the list.
                if(reVal==1 && flag == 0)
                {

                    dax = new ParADAG("subworkflow" + mDaxMap.size());

                    /* start to write dax file*/
                    //checkDaxWriteStart(bigDax);

                    bigDax = new DAXAbstraction(dax);
                    fList = new HashMap<String, Double>();
                    mDaxMap.put(bigDax, fList);
                    se = (SiteCatalogEntry)mSiteIndex.get(siteIndex);
                    limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) *1e8;
                    bigDax.limit = limit;

                    bigDax.site = siteIndex;
                    siteList = (LinkedList)mIndexSiteList.get(siteIndex);
                    siteList.add(bigDax);
                    setSizeMap(siteIndex, sizeC);

                }

                //add the job list into dax
                for(Iterator it = jobList.iterator();it.hasNext();)
                {
                    edu.isi.pegasus.planner.classes.Job job = getDagJob((String)it.next());
                    updateSizeMap(bigDax, job);
                    addDAGJob(job, dax);
                    bigDax.subDag.add(job);

                    mJob2DAX.put(job.getLogicalID(),dax);
                    //mJob2DAX.put(job.jobID, dax);
                    mJobColor.put(job.logicalId, "BLACK");

                }
                //put the next List into queue, otherwise no more job

                for(Iterator it = nextList.iterator(); it.hasNext();)
                {
                    edu.isi.pegasus.planner.classes.Job job = getDagJob((String)it.next());
                    String color ="";
                    //delete them later
                    if(!mJobColor.containsKey(job.logicalId))
                    {
                        mJobColor.put(job.logicalId, "WHITE");
                        color = "WHITE";
                    }
                    else
                        color = (String)mJobColor.get(job.logicalId);

                    if( !color.equals("BLACK") && !mQueue.contains(job))
                    {

                        mQueue.addFirst(job);//????????????????

                        mJobColor.put(job.logicalId, "GRAY");
                        depthMap.put(job.jobID, i+1);

                    }
                }

                // reVal2==1: the small list was committed, so subsequent jobs
                // must land in a fresh sub-DAX on the same site.
                if(reVal2 ==1 )
                {
                    dax = new ParADAG("subworkflow" + mDaxMap.size());

                    /* start to write dax file*/
                    //checkDaxWriteStart(bigDax);
                    
                    bigDax = new DAXAbstraction(dax);
                    fList = new HashMap<String, Double>();
                    mDaxMap.put(bigDax, fList);
                    
                    se = (SiteCatalogEntry)mSiteIndex.get(siteIndex);
                    limit = Double.parseDouble(se.getEnvironmentVariable("SITESIZE")) *1e8;
                    bigDax.limit = limit;
                    bigDax.site = siteIndex;
                    siteList = (LinkedList)mIndexSiteList.get(siteIndex);
                    siteList.add(bigDax);
                }

                mJobColor.put(dagJob.logicalId, "BLACK");
                mQueue.remove(dagJob);
            }
            /* start to write dax file*/
            //checkDaxWriteStart(bigDax);
            System.out.println("Jobs are all done");
            super.run();

        }

}
