/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package edu.isi.pegasus.planner.subworkflow.scheduler;


import edu.isi.pegasus.common.util.DynamicLoader;
import edu.isi.pegasus.planner.subworkflow.partitioner.SiteAbstraction;
import edu.isi.pegasus.planner.subworkflow.partitioner.PartitionerImplementation;
import edu.isi.pegasus.planner.subworkflow.partitioner.DAXAbstraction;
import edu.isi.pegasus.common.util.Version;
import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry;
import edu.isi.pegasus.planner.catalog.site.classes.GridGateway;
import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry;
import edu.isi.pegasus.planner.catalog.site.classes.SiteStore;
import edu.isi.pegasus.planner.classes.DAXJob;
import edu.isi.pegasus.planner.classes.PegasusFile;
import edu.isi.pegasus.planner.classes.PegasusFile.LINKAGE;
import edu.isi.pegasus.planner.namespace.Hints;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.PCRelation;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.classes.ReplicaLocation;
import edu.isi.pegasus.planner.dax.ADAG;
import edu.isi.pegasus.planner.dax.ParADAG;
import edu.isi.pegasus.planner.subworkflow.partitioner.PartitionResult;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Vector;
/**
 * Base class for sub-workflow schedulers: partitions an abstract workflow
 * into sub-workflow DAX files and schedules them onto sites.
 *
 * @author wchen
 */


/**
 * Abstract base class for schedulers that partition an abstract workflow
 * ({@link ADag}) into sub-workflows, write each sub-workflow out as a DAX
 * file, and splice corresponding DAX jobs and cross-DAX dependencies back
 * into the top-level workflow.
 *
 * Concrete subclasses override {@link #siteSelector()} to map the generated
 * sub-workflows onto execution sites.
 */
public abstract class Scheduler implements SchedulerImplementation{

    /** The abstract workflow being partitioned and scheduled. */
    protected ADag mDag ;
    //private Map mJob2DAX;
    /**
     * Parallel lists: entry i of mParentDAX/mChildDAX records one
     * cross-sub-workflow dependency (parent DAX name -> child DAX name),
     * collected by checkDAXRel() and applied to mDag by addRelation().
     */
    protected ArrayList mParentDAX;
    protected ArrayList mChildDAX;
    /**
     * lfn -> ReplicaLocation for the generated sub-workflow DAX files.
     * NOTE(review): never initialized in the constructor (the initialization
     * is commented out); it is only assigned from the partitioner result
     * inside addDAXJob(), so calling getDAXRC() or addReplicaCatalog()
     * before addDAXJob() yields null / NullPointerException.
     */
    private Map mDaxRC;

    /** DAXAbstraction -> per-DAX bookkeeping map, taken from the partitioner result. */
    protected Map mDaxMap;
    /** Bag of Pegasus initialization objects (properties, site store, ...). */
    protected PegasusBag mBag;

    /** Sites (as SiteAbstraction) discovered from the site catalog in getProperties(). */
    protected ArrayList mSiteIndex;
    /** Value of the "pegasus.subworkflow.reorder" property; read but not used in this class. */
    private String reorder;

    //relations removed from the DAG by reduceByLabel(); re-added by restore()
    private List<PCRelation> mReducedMapDag;
    //relations added to the DAG by reduceByLabel(); removed again by restore()
    private List<PCRelation> mReducedMapDag2;
    protected LinkedList mDaxQueue;
    /** job id -> estimated runtime, supplied by the partitioner. */
    protected Map mJob2Time;
    /** DAX id -> DAXAbstraction. */
    protected Map mDAXID2DAX;

    /** Package prefix prepended to unqualified partitioner class names before dynamic loading. */
    public static final String DEFAULT_PARTITIONER_PATH =
        "edu.isi.pegasus.planner.subworkflow.partitioner.implementation";
    /** Package prefix for estimator implementations. */
    public static final String DEFAULT_ESTIMATOR_PATH =
         "edu.isi.pegasus.planner.subworkflow.estimator.implementation";
    /** Partitioner used when "pegasus.subworkflow.partitioner" is unset or empty. */
    public static final String DEFAULT_PARTITIONER = "BackwardPartitioner8";
    public static final String DEFAULT_ESTIMATOR = "CriticalPath";

    /**
     * Returns the lfn -> ReplicaLocation map for the generated DAX files,
     * or null if addDAXJob() has not run yet.
     */
    public Map getDAXRC()
    {
        return mDaxRC;
    }

    /**
     * Constructor.
     *
     * @param dag the abstract workflow to partition and schedule
     * @param bag the bag of Pegasus initialization objects
     */
    public Scheduler(ADag dag, PegasusBag bag){
        mDag = dag;
        mBag = bag;
        //mJob2DAX = new HashMap<String, ADAG>();
        mParentDAX = new ArrayList<String>();
        mChildDAX = new ArrayList<String>();
        //mDaxRC =  new HashMap<String, ReplicaLocation>();
        //mDaxSize = new HashMap<String, Double>();
        //mDaxList = new ArrayList<ADAG>();
        mDaxMap = new HashMap<DAXAbstraction, Map>();
        //mSiteMap =  new HashMap<Integer, String>();
        mReducedMapDag = new ArrayList<PCRelation>();
        mReducedMapDag2 = new ArrayList<PCRelation>();
        mSiteIndex = new ArrayList<SiteAbstraction>();
        //mSiteSizeMap = new HashMap<Integer, Double>();
        mDaxQueue = new LinkedList<DAXAbstraction>();
        mJob2Time = new HashMap();
        mDAXID2DAX = new HashMap<String, DAXAbstraction>();

    }


    /**
     * Converts the cross-DAX dependencies collected in mParentDAX/mChildDAX
     * (parallel lists, see checkDAXRel()) into relations on mDag.
     */
    private void addRelation()
    {

        for(int i = 0; i < mParentDAX.size(); i++)
        {
            String parent = (String)mParentDAX.get(i);
            String child = (String)mChildDAX.get(i);
            mDag.addNewRelation(parent, child);
        }

    }

    /**
     * Registers a generated DAX file in the replica-catalog map as a
     * file:// URL on the "local" site.
     *
     * @param lfn logical file name of the DAX
     * @param pfn absolute path of the DAX file on disk
     */
    protected void addReplicaCatalog(String lfn, String pfn)
    {

        ReplicaCatalogEntry rce = new ReplicaCatalogEntry( "file://" + pfn, "local" );
        ArrayList al = new ArrayList();
        al.add(rce);
        ReplicaLocation rc = new ReplicaLocation(lfn, al);
        mDaxRC.put(lfn, rc);

    }

    /**
     * Creates a DAXJob wrapping a pegasus-plan invocation for one
     * sub-workflow DAX file and adds it to mDag.
     *
     * @param file      the DAX file the job plans (also its input file)
     * @param logicalID logical id used both as the job's id and its name
     * @param site      execution site passed via "-s" to pegasus-plan
     */
    protected void addDAXJob2DAG(String file, String logicalID, String site)
    {

        edu.isi.pegasus.planner.classes.Job j = new edu.isi.pegasus.planner.classes.Job( );
        j.setUniverse( GridGateway.JOB_TYPE.compute.toString() );
        j.setJobType( edu.isi.pegasus.planner.classes.Job.COMPUTE_JOB );
        j.setLogicalID( logicalID );
        DAXJob daxJob = new DAXJob( j );
        PegasusFile pf = new PegasusFile( file );


        pf.setLinkage( LINKAGE.INPUT );
        //the job should be tagged type pegasus
        daxJob.setTypeRecursive();
        //the job should always execute on local site
        //for time being
        daxJob.hints.construct( Hints.EXECUTION_POOL_KEY, "local" );

        //also set a fake executable to be used
        daxJob.hints.construct( Hints.PFN_HINT_KEY, "/tmp/pegasus-plan" );

        //retrieve the extra attribute about the DAX
        daxJob.setDAXLFN( file );
        daxJob.addInputFile( pf );

        //add default name and namespace information
        daxJob.setTransformation( "pegasus",
                                  "pegasus-plan",
                                  Version.instance().toString() );


        daxJob.setDerivation( "pegasus",
                              "pegasus-plan",
                               Version.instance().toString() );

        daxJob.level       = -1;

        daxJob.setName( logicalID);
        //pegasus-plan arguments are assembled from pegasus.subworkflow.* properties
        String arg_schema = this.mBag.getPegasusProperties().getProperty("pegasus.subworkflow.argument.schema");
        String arg_dir  = this.mBag.getPegasusProperties().getProperty("pegasus.subworkflow.argument.dir");

        String arg_prop = this.mBag.getPegasusProperties().getProperty("pegasus.subworkflow.argument.prop");

        String arguments = " -Dpegasus.schema.dax=" + arg_schema
                + " -Dpegasus.user.properties=" + arg_prop
                + " --dir " + arg_dir
                + " --cluster horizontal"
                + " -s " + site
                + " -basename tile-00001"
                + " --force "
                + " --nocleanup";

        daxJob.setSiteHandle(site);
        //daxJob.setExecutionPool();
        daxJob.setArguments(arguments);
        mDag.add(daxJob);

    }


    /**
     * Temporarily removes label-matched relations from mDag so the
     * partitioner does not see them; the removals are recorded in
     * mReducedMapDag (and the synthetic shortcut relations in
     * mReducedMapDag2) so restore() can undo the surgery afterwards.
     *
     * Two workflow families are special-cased by job-name substrings:
     * Montage (mProject/mBackground/mShrink/mAdd) and CyberShake
     * (ZipSeis/ZipPSA/ZipPeakSA).
     */
    private void reduceByLabel()
    {
        Vector relation = mDag.dagInfo.relations;
        //number of relations removed so far; used to re-index into the
        //shrinking Vector while i still walks the original range.
        //NOTE(review): if a single relation matches more than one removal
        //branch below, sum is incremented twice for one removal and the
        //index drifts — assumed not to happen for these workflows; verify.
        int sum = 0;
        int l2size = relation.size();
        //synthetic shortcut edge: (last Zip* parent) -> (ZipSeis child)
        PCRelation pcr = new PCRelation();
        PCRelation bcr = null;
        PCRelation tcr = null;

        for(int i = 0; i < l2size; i ++)
         {

            PCRelation rel = (PCRelation)relation.get(i - sum);

            //Montage: drop the edges that cross label boundaries
            if( (rel.parent.contains("mProject") && rel.child.contains("mBackground"))
                    || (rel.parent.contains("mBackground") && rel.child.contains("mAdd"))
                    || (rel.parent.contains("mShrink") && rel.child.contains("mAdd")))
                    {
                        mReducedMapDag.add(rel);
                        relation.remove(rel);
                        //System.out.println(rel);
                        sum ++;
                    }
            //For CyberShake
            if(rel.child.contains("_ZipSeis") )
            {
                pcr.setChild(rel.child);
                mReducedMapDag.add(rel);
                relation.remove(rel);
                sum ++;
            }
            if(rel.parent.contains("ZipPSA") || rel.parent.contains("ZipPeakSA"))
            {
                pcr.setParent(rel.parent);
                //bcr is removed but deliberately never re-added below
                bcr = rel;
                relation.remove(bcr);
                sum ++;
            }
            if(rel.parent.contains("_ZipSeis"))
            {
                tcr = rel;
                relation.remove(tcr);
                sum ++;
            }

        }

        relation.add(pcr);
        //relation.add(bcr);
        //guard against adding a null entry when no "_ZipSeis" parent edge
        //existed (e.g. a pure Montage workflow); the original code added
        //null here, which NPEs later in checkDAXRel()
        if( tcr != null )
        {
            relation.add(tcr);
        }



        mReducedMapDag2.add(pcr);


    }

    /**
     * Undoes reduceByLabel(): re-adds every removed relation and removes
     * every synthetic relation that was injected.
     */
    private void restore()
    {
        for(Iterator it = mReducedMapDag.iterator(); it.hasNext();)
        {
            PCRelation rel =(PCRelation)it.next();
            mDag.dagInfo.relations.add(rel);
        }
        for(Iterator it = mReducedMapDag2.iterator(); it.hasNext();)
        {
            PCRelation rel =(PCRelation)it.next();
            mDag.dagInfo.relations.remove(rel);
        }


    }

    /**
     * Walks every relation of the top-level DAG and classifies it:
     * if parent and child jobs landed in the same sub-workflow, the edge
     * is recorded inside that sub-workflow; otherwise the pair of
     * sub-workflow names is appended to pDAX/cDAX (later turned into
     * top-level relations by addRelation()).
     *
     * Note: the pFake/cFake pair only suppresses *consecutive* duplicate
     * cross-DAX edges, not duplicates separated by other edges.
     *
     * @param mDag         the (partitioned) top-level workflow
     * @param jobMap       job logical id -> the ParADAG it was assigned to
     * @param daxMap       DAXAbstraction -> bookkeeping map (keys are used here)
     * @param ID2LogicalID job id -> job logical id
     * @param pDAX         output list of parent sub-workflow names
     * @param cDAX         output list of child sub-workflow names (parallel to pDAX)
     */
    private void checkDAXRel(ADag mDag, Map jobMap, Map daxMap,
            Map ID2LogicalID,ArrayList pDAX, ArrayList cDAX)
    {
        String pFake="";
        String cFake="";
        //reverse index: ParADAG -> its DAXAbstraction wrapper
        Map dag2bigDax = new HashMap<ADAG, DAXAbstraction>();
        for(Iterator it = daxMap.keySet().iterator();it.hasNext();)
        {
            DAXAbstraction bigDax = (DAXAbstraction)it.next();
            dag2bigDax.put(bigDax.adag, bigDax);
        }


        Vector relation = mDag.dagInfo.relations;
        int size = relation.size();

        for(int i = 0; i < size; i ++)
        {
            PCRelation rel = (PCRelation)relation.get(i);
            String parent = rel.parent;
            String child  = rel.child;
            String parentName = (String)ID2LogicalID.get(parent);
            String childName  = (String)ID2LogicalID.get(child);
            //NOTE(review): assumes every job id resolves through both maps;
            //a miss would NPE on childDAX.equals() below — verify upstream.
            ParADAG childDAX = (ParADAG)jobMap.get(childName);
            ParADAG parentDAX = (ParADAG)jobMap.get(parentName);

            if(childDAX.equals(parentDAX))
                   {
                        //intra-DAX edge: record it inside the sub-workflow
                        DAXAbstraction bigDax = (DAXAbstraction)dag2bigDax.get(childDAX);
                        childDAX.addDependency(parentName, childName);
                        bigDax.subDag.addNewRelation(parent, child);
                   }
                   else
                   {
                       //cross-DAX edge: remember the sub-workflow pair once
                       if(!pFake.equals(parentDAX.getName())||!cFake.equals(childDAX.getName()))
                        {
                            pDAX.add(parentDAX.getName());
                            cDAX.add(childDAX.getName());
                            pFake = parentDAX.getName();
                            cFake = childDAX.getName();
                       }


                   }

        }

    }

    /**
     * Main driver: optionally reduces the DAG by label, dynamically loads
     * and runs the configured partitioner, writes each resulting
     * sub-workflow DAX to disk (via writeToFile worker objects), registers
     * the DAX files in the replica-catalog map, runs the subclass's
     * siteSelector(), and adds the cross-DAX relations to the top-level DAG.
     * All exceptions are caught and logged via printStackTrace().
     */
    public void addDAXJob()
    {
        String reduceMethod = mBag.getPegasusProperties().getProperty("pegasus.partition.reduce");

        if(reduceMethod!=null && reduceMethod.equals("label"))
            reduceByLabel();

        // The usual way of a dax name is subworkflow0, subworkflow1
        getProperties();
        PartitionerImplementation p ;

        //resolve the partitioner class; unqualified names are looked up
        //under DEFAULT_PARTITIONER_PATH
        String className = this.mBag.getPegasusProperties().getProperty("pegasus.subworkflow.partitioner");
        if(className == null || className.equals(""))
            className = DEFAULT_PARTITIONER;
        System.out.println("Selected Paritioner is " + className);
        className = (className.indexOf('.') == -1) ? DEFAULT_PARTITIONER_PATH + "." + className : className;
        DynamicLoader dl_pa = new DynamicLoader(className);
        System.out.println("ClassName:" + className);
        Object argList[] = new Object[2];
        argList[0] = mDag;
        argList[1] = mBag;



        try
        {
           p = (PartitionerImplementation) dl_pa.instantiate(argList);
            p.run();
            PartitionResult pr = p.getResult();
            mDaxMap = pr.mDaxMap;

            mDaxRC = pr.mDaxRC;//this is different
            mJob2Time = pr.mJob2Time;

            //undo the label reduction before classifying relations
            restore();
            mDag = p.getDag();//make it parallel
            long startTime = System.currentTimeMillis();
            checkDAXRel(mDag, pr.mJob2DAX, pr.mDaxMap, pr.mID2LogicalID,mParentDAX, mChildDAX);
            long endTime = System.currentTimeMillis();
            System.out.println("Duration of checkDAXRel: " + (endTime - startTime));
            //the original compute jobs are replaced by DAX jobs
            mDag.clearJobs();
            //start to write dax
            ArrayList writeThreads = new ArrayList<writeToFile>();
            String subPath = this.mBag.getPegasusProperties().getProperty("pegasus.subworkflow.argument.dir");
            for(Iterator it = mDaxMap.keySet().iterator(); it.hasNext();)
            {
                DAXAbstraction bigDax =((DAXAbstraction)it.next());
                ParADAG daxItem = bigDax.adag;

                String daxName = daxItem.getName() + ".xml";
                String dax_path = subPath + "/" + daxName;

                //write the DAX asynchronously; completion is awaited below
                writeThreads.add( new writeToFile(daxItem, dax_path));
                addReplicaCatalog(daxName , dax_path);
            }
            siteSelector();//estimator also
            addRelation();

            //wait for every DAX writer to finish before returning
            for (Iterator it = writeThreads.iterator(); it.hasNext();)
            {
                writeToFile wt = (writeToFile)it.next();
                wt.getDone();
            }

            printOut();

        }
        catch (Exception ex)
        {
            ex.printStackTrace();
        }

    }

    /** Prints a one-line summary of how many sub-workflows were generated. */
    private void printOut()
    {
        System.out.println("\n\n There are " + mDaxMap.size() + " sub workflows (two are fake nodes. Don't worry.)\n");


    }

    /**
     * Site-selection hook; subclasses are expected to override this.
     * The base implementation only prints a warning.
     */
    public void siteSelector()
    {
        System.out.println("You're wrong to be here\n");
    }


    /**
     * Reads the site catalog and subworkflow properties: for every
     * non-"local" site, parses the SITESIZE (scaled by 1e8) and SLOT
     * environment variables into a SiteAbstraction added to mSiteIndex.
     * Sites missing either variable cause a (caught and printed)
     * exception, aborting the remainder of the scan.
     */
    private void getProperties()
    {
        try
        {

            SiteStore mSite = mBag.getHandleToSiteStore();

            for (Iterator it = mSite.entryIterator();it.hasNext();)
            {
                SiteCatalogEntry siteEntry = (SiteCatalogEntry)it.next();
                String siteName = siteEntry.getSiteHandle();
                if(siteName.equals("local"))continue;
                double size = Double.parseDouble(siteEntry.getEnvironmentVariable("SITESIZE")) * 1e8;

                int slot    = Integer.parseInt(siteEntry.getEnvironmentVariable("SLOT"));

                SiteAbstraction site = new SiteAbstraction(siteName, size, slot);
                mSiteIndex.add(site);
                System.out.println(siteName+" has been selected in SubworkflowPartitioner.java");

            }
            reorder = mBag.getPegasusProperties().getProperty("pegasus.subworkflow.reorder");

        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }
}
