package skewreduce.framework;

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobID;

import org.jgrapht.DirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import skewreduce.framework.logical.LFinalizeOp;
import skewreduce.framework.logical.LMergeOp;
import skewreduce.framework.logical.LPartitionOp;
import skewreduce.framework.logical.LSampleOp;
import skewreduce.framework.logical.LProcessOp;
import skewreduce.framework.logical.LogicalOp;
import skewreduce.framework.physical.PGenerateOp;
import skewreduce.framework.physical.POptimizeOp;
import skewreduce.framework.physical.PReoptimizeOp;
import skewreduce.framework.physical.PhysicalOp;
import skewreduce.lib.Partition;



/**
 * Execution plan for a SkewReduce ("Eagg") job.
 *
 * <p>The plan is a DAG of {@link PhysicalOp} vertices connected by
 * {@link Dependency} edges. Operators move through three scheduler sets:
 * {@code waiting} -&gt; {@code running} -&gt; done (removed). An operator is
 * ready when none of its incoming dependencies is WAITING or FAILED.
 * Pipelined merge operators are tracked separately in {@code pipelined}
 * and may be reset back to WAITING (see {@link #reset(PhysicalOp)}).
 *
 * <p>Thread-safety: most mutating methods are {@code synchronized} on this
 * instance. NOTE(review): {@code isReady}, {@code retry}, {@code isPipelined}
 * and the manual-partition helpers read/write shared state without the lock —
 * presumably they are only called from the scheduling thread; confirm before
 * relying on concurrent access.
 */
public class EaggPlan
implements VertexStyleProvider<PhysicalOp>,
          EdgeStyleProvider<EaggPlan.Dependency> {
    protected static final Logger LOG = LoggerFactory.getLogger("PLAN");

    /** Conf key: path to a file holding a pre-computed (manual) partition plan. */
    public static final String MANUAL_PARTITION_PLAN_ATTR = "skewreduce.partition.manual.file";
    /** Conf key: name of an {@link InitialPartitionStrategy} enum constant. */
    public static final String INIT_PARTITION_STRATEGY_ATTR = "skewreduce.partition.initStrategy";
    /** Conf key: URI of the HTTP RPC endpoint (not read in this class). */
    public static final String EAGG_HTTP_RPC_URI_ATTR = "skewreduce.httpRpc.uri";

    // partition plan
    // static
    // fully dynamic

    /**
     * Edge of the job graph: a dependency between two physical operators,
     * carrying the scheduling status of the upstream operator's output.
     */
    @SuppressWarnings("serial")
    public static class Dependency extends DefaultEdge {
        /** Lifecycle of a dependency edge. */
        public enum Status {
            WAITING,   // upstream not finished; downstream blocked
            PIPELINE,  // upstream output streamed directly into downstream
            COMPLETE,  // upstream finished successfully
            FAILED     // upstream failed
        }

        private Status state;

        public Status getStatus() { return state; }
        public void setStatus(Status s) { state = s; }
    }

    /** Operator DAG. A simple directed graph: at most one edge per (from,to) pair. */
    private DirectedGraph<PhysicalOp,Dependency> jobGraph;
    /** Lazily created DOT exporter; this class provides the vertex/edge styles. */
    private DOTExporter<PhysicalOp,Dependency> exporter;

    /** Operators not yet handed to the scheduler. */
    private Set<PhysicalOp> waiting   = new HashSet<PhysicalOp>();
    /** Operators currently scheduled/executing. */
    private Set<PhysicalOp> running   = new HashSet<PhysicalOp>();

    /*
     * pipelined intermediate merge operators are placed here.
     * on completion of aggregated merge operator, all are cleared.
     */
    private IdentityHashMap<PhysicalOp,Set<PhysicalOp>> pipelined = new IdentityHashMap<PhysicalOp,Set<PhysicalOp>>();
    /** Tree of merge operators, rooted at the final merge. */
    private MergeOpTree mtree = new MergeOpTree();

    /////////////////////////////////////

    private final EaggDriver driver;          // application-specific logic
    private final int clusterCapacity;        // max concurrent map tasks in the cluster
    private Path workDir;                     // per-job scratch directory
    private final Path inputPath;
    private final Path outputPath;
    private final Partition partitionInfo;    // root of the partition tree

    // Logical operator factories supplied by the driver.
    private final LSampleOp lop_sample;
    private final LPartitionOp lop_partition;
    private final LProcessOp lop_work;
    private final LMergeOp lop_merge;
    private final LFinalizeOp lop_finalize;

    // Framework-internal logical operators (optimize/reoptimize/generate).
    private LogicalOp  lop_opt;
    private LogicalOp  lop_reopt;
    private LogicalOp  lop_gen;

    // Distinguished physical operators of the plan.
    private PhysicalOp pop_rootPartition;
    private PhysicalOp pop_finalMerge;
    private PhysicalOp pop_finalize;


    /** Non-null iff a valid manual partition plan file was configured. */
    private File manualPartitionFile;
    private OperatorSchedule opSchedule;

    /** How the initial input partitioning is performed. */
    public enum InitialPartitionStrategy {
        STATIC, // do as we did so far
        PARTIAL_DYNAMIC, // use mapper implementation to further partition the longest task
        FULLY_DYNAMIC // use reducer implementation during partition phase
    }
    private InitialPartitionStrategy initPartStrategy = InitialPartitionStrategy.STATIC;
    public InitialPartitionStrategy getInitialPartitionStrategy() { return initPartStrategy; }

    public PhysicalOp getRootPartitionOp() {
        return pop_rootPartition;
    }
    public PhysicalOp getFinalizeOp() {
        return pop_finalize;
    }
    public PhysicalOp getFinalMergeOp() {
        return pop_finalMerge;
    }

    /**
     * Builds an empty plan: creates the scratch directory, loads an optional
     * manual partition plan, reads the initial partition strategy, queries the
     * cluster capacity, and instantiates the logical operator factories.
     *
     * @param driver application specific logic
     * @param in path to job input
     * @param out path to job output
     * @param root root of the partition tree
     * @throws IOException if the filesystem, manual-plan file, or cluster
     *         status cannot be accessed
     */
    public EaggPlan(EaggDriver driver,Path in,Path out,Partition root) throws IOException {
        this.driver = driver;
        this.inputPath = in;
        this.outputPath = out;
        this.partitionInfo = root;

        jobGraph = new DefaultDirectedGraph<PhysicalOp,Dependency>(Dependency.class);
        // generate random work directory

        // ${user.name} is deliberately left literal: Hadoop expands conf
        // variables on get(), which is why the value is set then re-read.
        String path = String.format( "/tmp/hadoop-${user.name}/tmp-%s-%d",
                driver.getAppName(),
                System.currentTimeMillis());
        driver.getConf().set("skewreduce.workingdir",path);
        path = driver.getConf().get("skewreduce.workingdir");

        workDir = new Path(path);

        FileSystem fs = driver.getFileSystem();

        LOG.info("Using file system = "+fs.getUri());
        LOG.info("Working directory = "+workDir.toUri());

        fs.mkdirs(workDir);
        if ( driver.getConf().getBoolean("skewreduce.workingdir.deleteonexit",true) )
            fs.deleteOnExit(workDir);


        // check whether has a manual plan
        String manualFileName = driver.getConf().get(MANUAL_PARTITION_PLAN_ATTR);
        if ( manualFileName != null ) {
            manualPartitionFile = new File(manualFileName);
            if ( ! manualPartitionFile.exists() || manualPartitionFile.length() == 0 ) {
                manualPartitionFile = null; // treat missing/empty file as "no manual plan"
            } else {
                LOG.info("Using a manual partition plan = {}",manualPartitionFile);

                // Deserialize the partition tree directly into the caller's root.
                DataInputStream pf = new DataInputStream(new FileInputStream(manualPartitionFile));
                try {
                    partitionInfo.readFields(pf);
                } finally {
                    pf.close(); // close even if readFields throws
                }
            }
        }

        String initPartStr = driver.getConf().get(INIT_PARTITION_STRATEGY_ATTR);
        if ( initPartStr != null ) {
            this.initPartStrategy = Enum.valueOf(InitialPartitionStrategy.class, initPartStr);
        }

        opSchedule = new OperatorSchedule(driver.getConf());

        // capacity = max concurrent map tasks; used by schedulers via getClusterCapacity()
        JobClient jc = new JobClient(new JobConf(driver.getConf()));
        ClusterStatus cs = jc.getClusterStatus();
        clusterCapacity = cs.getMaxMapTasks();
        jc.close();

        // setup initiators
        lop_sample = driver.createLSampleOp();
        lop_partition = driver.createLPartitionOp();
        lop_work = driver.createLWorkOp();
        lop_merge = driver.createLMergeOp();
        lop_finalize = driver.createLFinalizeOp();

        lop_opt = POptimizeOp.createLocalOp(driver);
        lop_reopt = PReoptimizeOp.createLogicalOp(driver);
        lop_gen = PGenerateOp.createLocalOp(driver);
    }

    // -- Operator factories -------------------------------------------------
    // Each create*Op instantiates a physical operator from its logical
    // counterpart, adds it to the graph, and queues it as waiting. The
    // user-level operators (sample/partition/work/merge/finalize) are also
    // registered in opid2pop so running Hadoop jobs can be mapped back.

    /** Creates a sampling operator over {@code context} and queues it. */
    public synchronized PhysicalOp createSampleOp(Partition context) {
        PhysicalOp op = lop_sample.createPhysicalOp(this, context);
        jobGraph.addVertex(op);
        waiting.add(op);
        opid2pop.put(op.getID().toString(),op);
        return op;
    }

    /** Creates a partitioning operator over {@code partition} and queues it. */
    public synchronized PhysicalOp createPartitionOp(Partition partition) {
        PhysicalOp op = lop_partition.createPhysicalOp(this,partition);
        jobGraph.addVertex(op);
        waiting.add(op);
        opid2pop.put(op.getID().toString(),op);
        return op;
    }
    /** Creates a process ("work") operator over {@code partition} and queues it. */
    public synchronized PhysicalOp createWorkOp(Partition partition) {
        PhysicalOp op = lop_work.createPhysicalOp(this,partition);
        jobGraph.addVertex(op);
        waiting.add(op);
        opid2pop.put(op.getID().toString(),op);
        return op;
    }
    /** Creates a merge operator over {@code partition} and queues it. */
    public synchronized PhysicalOp createMergeOp(Partition partition) {
        PhysicalOp op = lop_merge.createPhysicalOp(this,partition);
        jobGraph.addVertex(op);
        waiting.add(op);
        opid2pop.put(op.getID().toString(),op);
        return op;
    }
    /** Creates the finalize operator over {@code context} and queues it. */
    public synchronized PhysicalOp createFinalizeOp(Partition context) {
        PhysicalOp op = lop_finalize.createPhysicalOp(this,context);
        jobGraph.addVertex(op);
        waiting.add(op);
        opid2pop.put(op.getID().toString(),op);
        return op;
    }

    /** Creates a plan-generation operator. Not registered in opid2pop (local op). */
    public synchronized PGenerateOp createGenerateOp(Partition context) {
        PGenerateOp op = (PGenerateOp)lop_gen.createPhysicalOp(this,context);
        jobGraph.addVertex(op);
        waiting.add(op);
        return op;
    }
    /** Creates an optimize operator. Not registered in opid2pop (local op). */
    public synchronized PhysicalOp createOptimizeOp(Partition context) {
        PhysicalOp op = lop_opt.createPhysicalOp(this,context);
        jobGraph.addVertex(op);
        waiting.add(op);
        return op;
    }
    /** Creates a reoptimize operator. Not registered in opid2pop (local op). */
    public synchronized PhysicalOp createReoptimizeOp(Partition context) {
        PhysicalOp op = lop_reopt.createPhysicalOp(this,context);
        jobGraph.addVertex(op);
        waiting.add(op);
        return op;
    }


    /** Creates and records the root partition operator for {@code root}. */
    public void setRootPartitionOp(Partition root) {
        pop_rootPartition = createPartitionOp(root);
    }
    /** Creates and records the finalize operator for {@code root}. */
    public void setFinalizeOp(Partition root) {
        pop_finalize = createFinalizeOp(root);
    }
    /** Records {@code op} as the final merge and roots the merge tree at it. */
    public void setFinalMergeOp(PhysicalOp op) {
        pop_finalMerge = op;
        mtree.setRoot(op);
    }

    /** Adds a dependency edge {@code from -> to} in WAITING state. */
    public void addDependency(PhysicalOp from,PhysicalOp to) {
        addDependency(from,to,Dependency.Status.WAITING);
    }

    /**
     * Adds (or updates) the dependency edge {@code from -> to} with status
     * {@code stat}. Merge-to-merge edges are also mirrored into the merge tree.
     *
     * @throws IllegalArgumentException if either operator is not in the graph
     */
    public synchronized void addDependency(PhysicalOp from,PhysicalOp to,Dependency.Status stat) {
        Dependency dep = jobGraph.addEdge(from,to);
        if ( dep == null ) {
            // BUGFIX: a simple directed graph returns null from addEdge when
            // the edge already exists; previously this caused an NPE below.
            // Make the call idempotent by updating the existing edge instead.
            dep = jobGraph.getEdge(from,to);
        }
        dep.setStatus(stat);

        if ( from.getType() == LogicalOp.Type.MERGE
                && to.getType() == LogicalOp.Type.MERGE ) {
            // child(from) feeds parent(to) in the merge tree
            mtree.addParentChild(to,from);
        }
    }

    /** Removes the dependency edge {@code from -> to}, if present. */
    public synchronized void removeDependency(PhysicalOp from,PhysicalOp to) {
        jobGraph.removeEdge(from,to);
    }

    /**
     * Returns the upstream operators of {@code op} whose dependency edge is
     * COMPLETE (i.e. whose output is available).
     */
    public synchronized List<PhysicalOp> getUpStreamOps(PhysicalOp op) {
        Set<Dependency> deps = jobGraph.incomingEdgesOf(op);
        ArrayList<PhysicalOp> ops = new ArrayList<PhysicalOp>(deps.size());
        for ( Dependency dep : deps ) {
            if ( dep.getStatus() == Dependency.Status.COMPLETE )
                ops.add( jobGraph.getEdgeSource(dep) );
        }
        return ops;
    }

    /** Returns all direct downstream operators of {@code op}, regardless of status. */
    public synchronized List<PhysicalOp> getDownStreamOps(PhysicalOp op) {
        Set<Dependency> deps = jobGraph.outgoingEdgesOf(op);
        ArrayList<PhysicalOp> ops = new ArrayList<PhysicalOp>(deps.size());
        for ( Dependency dep : deps ) {
            ops.add( jobGraph.getEdgeTarget(dep) );
        }
        return ops;
    }

    /**
     * check <code>op</code> is a pipelined instance.
     * @param op
     * @return <code>true</code> if upstream operators are pipelined to this one
     *         (vacuously true when {@code op} has no incoming edges)
     */
    public boolean isPipelined(PhysicalOp op) {
        Set<Dependency> deps = jobGraph.incomingEdgesOf(op);
        for (Dependency dep: deps) {
            if ( dep.getStatus() != Dependency.Status.PIPELINE ) {
                return false;
            }
        }
        return true;
    }

    /**
     * Undoes the pipelining of {@code op}: every incoming PIPELINE edge is
     * set back to WAITING, each upstream operator is re-queued, and the set
     * of pipelined intermediates is redistributed among the upstream
     * operators each intermediate feeds.
     *
     * @throws IllegalStateException if any incoming edge of {@code op} is not
     *         in PIPELINE state
     */
    public void reset(PhysicalOp op) {
        Set<Dependency> deps = jobGraph.incomingEdgesOf(op);
        Set<PhysicalOp> pipe = pipelined.remove(op);
        for ( Dependency dep : deps ) {
            if ( dep.getStatus() != Dependency.Status.PIPELINE )
                throw new IllegalStateException(op.getID()+" is not a pipelined instance");
            dep.setStatus(Dependency.Status.WAITING);
            PhysicalOp upOp = jobGraph.getEdgeSource(dep);
            // BUGFIX: pipelined.remove(op) returns null when op has no
            // registered pipelined intermediates (e.g. already cleared by
            // complete()); the old code NPE'd on pipe.isEmpty().
            if ( pipe != null && ! pipe.isEmpty() ) {
                // extract pipeline: hand each intermediate to the upstream
                // operator it is upstream of
                HashSet<PhysicalOp> ops = new HashSet<PhysicalOp>();
                for ( PhysicalOp o : pipe ) {
                    if ( o.isUpStreamOf(upOp) ) {
                        ops.add(o);
                    }
                }
                pipelined.put(upOp,ops);
            }
            waiting.add(upOp);	// make it available for scheduling
        }
        waiting.add(op);	// put the operator back to waiting
    }

    ///////////////////////////////////////////////////////////////////////////
    // context information

    public final Path getWorkDir() { return workDir; }
    public final EaggDriver getDriver() { return driver; }
    public final FileSystem getFileSystem() throws IOException { return driver.getFileSystem(); }
    public final Path getJobInputPath() { return inputPath; }
    public final Path getJobOutputPath() { return outputPath; }
    public final Partition getPartitionInfo() { return partitionInfo; }
    public final int getClusterCapacity() { return clusterCapacity; }

    public final boolean hasManualPlan() { return manualPartitionFile != null; }
    public File getManualPartitionFile() {
        return manualPartitionFile;
    }
    public OperatorSchedule getSchedule() { return opSchedule; }

    /**
     * Returns the waiting operators whose dependencies are satisfied, marking
     * each one READY. The returned set is a snapshot; operators stay in
     * {@code waiting} until {@link #scheduled}.
     */
    public synchronized Set<PhysicalOp> getReadyTasks() {
        if ( waiting.isEmpty() )
            return Collections.emptySet();

        HashSet<PhysicalOp> ready2run = new HashSet<PhysicalOp>();
        for ( PhysicalOp t : waiting ) {
            if ( isReady(t) ) {
                ready2run.add(t);
                t.setStatus(PhysicalOp.Status.READY);
            }
        }

        return ready2run;
    }

    /** Moves every operator in {@code w} from waiting to running. */
    public synchronized void scheduled(Set<PhysicalOp> w) {
        if ( w.size() == 0 ) return;
        running.addAll(w);
        waiting.removeAll(w);
    }

    /** Moves a single operator from waiting to running. */
    public synchronized void scheduled(PhysicalOp w) {
        waiting.remove(w);
        running.add(w);
    }

    /**
     * Marks {@code t} finished: all outgoing edges become COMPLETE, and its
     * bookkeeping in {@code running} and {@code pipelined} is cleared.
     */
    public synchronized void complete(PhysicalOp t) {
        Set<Dependency> deps = jobGraph.outgoingEdgesOf(t);
        for ( Dependency dep : deps ) {
            dep.setStatus(Dependency.Status.COMPLETE);
        }
        // pipeline if there is any
        running.remove(t);
        pipelined.remove(t);
    }

    // FIXME: failure handling
    /** Marks {@code t} failed: all outgoing edges become FAILED. */
    public synchronized void fail(PhysicalOp t) {
        Set<Dependency> deps = jobGraph.outgoingEdgesOf(t);
        for ( Dependency dep : deps ) {
            dep.setStatus(Dependency.Status.FAILED);
        }
        running.remove(t);
    }

    /** Re-queues a running operator for another attempt. */
    public void retry(PhysicalOp t) {
        // remove schedule from running and put it waiting
        running.remove(t);
        waiting.add(t);
    }

    /**
     * An operator is ready when no incoming dependency is WAITING or FAILED
     * (COMPLETE and PIPELINE edges both allow execution).
     */
    public boolean isReady(PhysicalOp t) {
        Set<Dependency> deps = jobGraph.incomingEdgesOf(t);
        for ( Dependency dep : deps ) {
            if ( dep.getStatus() == Dependency.Status.WAITING
                    || dep.getStatus() == Dependency.Status.FAILED )
                return false;
        }
        return true;
    }

    /** True when nothing is waiting or running. Failed ops count as done. */
    public synchronized boolean isCompleted() {
        return waiting.isEmpty() && running.isEmpty();
    }

    // for manual partition

    /** Shallowest partition level seen so far while building the manual plan. */
    private int              rootPartitionLevel = Integer.MAX_VALUE;
    /** Stack of partition ops along the current path of the partition-tree walk. */
    private List<PhysicalOp> partitionStack = new ArrayList<PhysicalOp>(64);

    int getRootPartitionLevel() { return rootPartitionLevel; }

    /**
     * A partition operator is needed when we are already inside a partition
     * chain, or when either child of {@code partition} is a leaf.
     */
    boolean needPartitionOp(Partition partition) {
        return ! partitionStack.isEmpty()
                || ( (partition.hasLeft() && partition.getLeft().isLeaf())
                        || (partition.hasRight() && partition.getRight().isLeaf()) );
    }

    /**
     * called when visiting MERGE node which requires partition operator
     * @param partition
     * @return the newly created partition operator, now on top of the stack
     */
    PhysicalOp pushPartitionOp(Partition partition) {
        PhysicalOp pop = createPartitionOp(partition);
        if ( ! partitionStack.isEmpty() ) {
            // add dependency to top operator
            addDependency(partitionStack.get(partitionStack.size()-1),pop);
        }
        partitionStack.add(pop);

        // update root partition
        if ( partition.getLevel() < rootPartitionLevel ) {
            if ( pop_rootPartition != null ) {
                // delete this node from graph. will invalidate all false dependency
                jobGraph.removeVertex(pop_rootPartition);
                waiting.remove(pop_rootPartition);
            }
            rootPartitionLevel = partition.getLevel();
            // new root partitions the whole input (partitionInfo), not just this subtree
            pop_rootPartition = createPartitionOp(partitionInfo);
        } else if ( partition.getLevel() == rootPartitionLevel+1 ) {
            addDependency(pop_rootPartition,pop);
        }

        return pop;
    }

    /**
     * called when leaving MERGE op
     */
    void popPartitionOp() {
        if ( ! partitionStack.isEmpty() ) {
            partitionStack.remove(partitionStack.size()-1);
        }
    }

    /**
     * Creates a work operator that depends on the current top-of-stack
     * partition op, and on the root partition op when it is a direct child
     * of the root level.
     */
    PhysicalOp createWorkOp2(Partition partition) {
        PhysicalOp pwork = createWorkOp(partition);
        addDependency(partitionStack.get(partitionStack.size()-1),pwork);
        if ( partition.getLevel() == rootPartitionLevel+1 )
            addDependency(pop_rootPartition,pwork);
        return pwork;
    }

    /**
     * Removes partition operators at the root level that are no longer
     * connected into the plan (left behind by root replacement in
     * {@link #pushPartitionOp}).
     */
    void removeDanglingPartitionOp() {
        Set<PhysicalOp> ops = jobGraph.vertexSet();
        Set<PhysicalOp> dangling = new HashSet<PhysicalOp>();
        // collect first: cannot remove vertices while iterating vertexSet()
        for ( PhysicalOp op : ops ) {
            if ( op.getType() == LogicalOp.Type.PARTITION
                    && op.getBound().getLevel() == rootPartitionLevel ) {
                dangling.add(op);
            }
        }
        for ( PhysicalOp op : dangling ) {
            jobGraph.removeVertex(op);
            waiting.remove(op);
        }
    }

    /** Maps operator/job id strings to their physical operators (see create*Op). */
    private ConcurrentHashMap<String,PhysicalOp> opid2pop = new ConcurrentHashMap<String,PhysicalOp>();

    /*
    public void registerRunningPhysicalOp(PhysicalOp op) {
        JobID jobid = op.getJobID();
        jobid2pop.put(jobid,op);
        if ( LOG.isDebugEnabled() ) {
            LOG.debug(String.format("Register job %s",jobid));
        }
    }
    */

    /**
     * Removes and returns the operator registered under {@code jobid},
     * or {@code null} if none is registered.
     */
    public PhysicalOp unregisterRunningPhysicalOp(String jobid) {
    //public PhysicalOp unregisterRunningPhysicalOp(JobID jobid) {
        if ( LOG.isDebugEnabled() ) {
            LOG.debug(String.format("Unregister job %s",jobid));
        }
        return opid2pop.remove(jobid);
    }


    //////////////////////////////////////////////////////////////////////////
    // generate plan graph in DOT format

    /**
     * Writes the current job graph to {@code fn} in GraphViz DOT format,
     * using this class's vertex/edge style callbacks.
     */
    public synchronized void exportDot(String fn) throws IOException {
        if ( exporter == null )
            exporter = new DOTExporter<PhysicalOp,Dependency>(this,this);

        PrintWriter wr = new PrintWriter(fn);
        try {
            exporter.export(wr,jobGraph);
        } finally {
            wr.close(); // always release the file handle
        }
    }

    public String getVertexName(PhysicalOp v) {
        return v.getID().toString();
    }
    /** DOT attributes for a vertex: shape by operator type, color by status. */
    public String getVertexStyle(PhysicalOp v) {
        // StringBuilder: no need for StringBuffer's synchronization on a local
        StringBuilder buf = new StringBuilder(1024);

        buf.append("shape=");
        switch ( v.getType() ) {
            case SAMPLE: buf.append("ellipse"); break;
            case PARTITION: buf.append("trapezium"); break;
            case WORK: buf.append("ellipse"); break;
            case MERGE: buf.append("invhouse"); break;
            case FINALIZE: buf.append("ellipse"); break;
            default: buf.append("rect");
        }

        PhysicalOp.Status state = v.getStatus();
        if ( state == PhysicalOp.Status.READY ) {
            buf.append(",color=azure3");
        } else if ( state == PhysicalOp.Status.RUNNING ) {
            buf.append(",color=gold");
        } else {
            // terminal state: green on success, red otherwise
            if ( v.isSuccessful() ) {
                buf.append(",color=chartreuse");
            } else {
                buf.append(",color=crimson");
            }
        }

        return buf.toString();
    }

    public String getEdgeName(Dependency e) { return null; }
    /** DOT attributes for an edge, keyed on the dependency status. */
    public String getEdgeStyle(Dependency e) {
        if ( e == null )
            throw new IllegalArgumentException("Edge is null?!");

        switch ( e.getStatus() ) {
            case WAITING:
                return "color=azure3";
            case COMPLETE:
                return "color=black";
            case FAILED:
                return "color=red";
            case PIPELINE:
                return "color=black,style=dashed";
        }
        return null;
    }

}
