package skewreduce.framework.physical;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import skewreduce.framework.EaggPlan;
import skewreduce.framework.SchedulerEvent;
import skewreduce.framework.logical.LogicalOp;
import skewreduce.lib.Partition;



/**
 * Physical instantiation of a logical operator.
 *
 * <p>A {@code PhysicalOp} binds a {@link LogicalOp} to a concrete data
 * {@link Partition}, manages the Hadoop {@link Job} that executes it, tracks
 * its lifecycle {@link Status}, and collects simple post-run statistics from
 * job counters. Instances are ordered ({@link #compareTo}) by their
 * {@link ID}, which yields a rough scheduling priority.
 */
public abstract class PhysicalOp
implements Comparable<PhysicalOp>, Callable<SchedulerEvent>, Configurable {
    // shares the "PLAN" log category with the rest of the planning framework
    private static final Logger LOG = LoggerFactory.getLogger("PLAN");

    /** Lifecycle states of a physical operator. */
    public static enum Status {
        WAITING,
        READY,
        RUNNING,
        COMPLETE,
        FAILED
    }

    /** Keys of post-execution statistics derived from skewreduce counters. */
    public static enum StatCounter {
        MERGE_STATE_COUNT,
        MERGE_OUTPUT_COUNT,
        MERGE_STATE_SIZE,
        MERGE_OUTPUT_SIZE,
        TOTAL_EXECUTION_TIME
    }

    // lazily populated by setStatistics(); null until the job has finished
    private EnumMap<StatCounter,Long> statistics;

    /**
     * Record final statistics from the finished job's counters. Byte sizes
     * are estimated as record count multiplied by the configured per-record
     * size (0 when the corresponding size attribute is unset).
     *
     * @param group skewreduce counter group of the completed job
     */
    protected void setStatistics(CounterGroup group) {
        statistics = new EnumMap<StatCounter,Long>(StatCounter.class);

        Counter c = group.findCounter("MUX_WRITE_0");
        long count = c.getValue();
        statistics.put(StatCounter.MERGE_STATE_COUNT, count);
        statistics.put(StatCounter.MERGE_STATE_SIZE,
                count * getConf().getInt("skewreduce.merge.state.recordsize", 0));

        c = group.findCounter("MUX_WRITE_1");
        count = c.getValue();
        statistics.put(StatCounter.MERGE_OUTPUT_COUNT, count);
        statistics.put(StatCounter.MERGE_OUTPUT_SIZE,
                count * getConf().getInt("skewreduce.merge.output.recordsize", 0));
    }

    /**
     * @param k statistic key
     * @return recorded value, or 0 when statistics have not been collected yet
     */
    public long getStatistics(StatCounter k) {
        Long v = null;
        if ( statistics != null )
            v = statistics.get(k);
        return ( v == null ) ? 0L : v;
    }

    /**
     * Identifier of a physical operator: operator type plus the partition
     * coordinates (level, bit id). Comparison returns a rough scheduling
     * order: by type first, then deeper (higher) levels first, then by id.
     */
    public static final class ID
    implements Comparable<ID>, Writable {
        final LogicalOp.Type type;
        final int lv;      // partition level
        final long id;     // partition bit id
        final String sid;  // canonical "TYPE-level-id" string form

        /** Canonical string form, shared by all constructors. */
        private static String toSid(LogicalOp.Type type,int lv,long id) {
            return String.format("%s-%d-%d",type.name(),lv,id);
        }

        public ID(LogicalOp.Type type,Partition.ID pid) {
            this(type,pid.getLevel(),pid.getID());
        }

        public ID(LogicalOp.Type type,int lv,long id) {
            this.type = type;
            this.lv = lv;
            this.id = id;
            this.sid = toSid(type,lv,id);
        }

        /** Parse the canonical "TYPE-level-id" string form. */
        public ID(String s) {
            String[] flds = s.split("-");
            type = LogicalOp.Type.valueOf(LogicalOp.Type.class,flds[0]);
            lv = Integer.parseInt(flds[1]);
            id = Long.parseLong(flds[2]);
            sid = s;
        }

        /** Leaf partitions map to WORK operators, inner nodes to MERGE. */
        public ID(Partition p) {
            this(( p.isLeaf() ) ? LogicalOp.Type.WORK : LogicalOp.Type.MERGE,
                 p.getLevel(), p.getID());
        }

        /** Same partition coordinates as {@code other} but with a different type. */
        public ID(String tname,ID other) {
            this(LogicalOp.Type.valueOf(LogicalOp.Type.class,tname),other.lv,other.id);
        }

        ID(PhysicalOp op) {
            this(op.getType(),op.getBound().getLevel(),op.getBound().getID());
        }

        public LogicalOp.Type getType() { return type; }
        public int getLevel() { return lv; }
        public long getId() { return id; }
        public Partition.ID getPartitionID() { return new Partition.ID(id,lv); }

        /**
         * NOTE(review): deliberately accepts {@link Partition.ID},
         * {@link PhysicalOp} and (via the fall-through) the canonical string
         * form in addition to other IDs. Those cross-type comparisons are
         * asymmetric; they are kept for compatibility with existing lookups.
         */
        @Override
        public boolean equals(Object o) {
            if ( this == o ) return true;
            if ( o instanceof ID ) {
                ID x = (ID)o;
                return (lv == x.lv) && (id == x.id) && (type == x.type);
            } else if ( o instanceof Partition.ID ) {
                Partition.ID x = (Partition.ID)o;
                return ( lv == x.getLevel() && id == x.getID() );
            } else if ( o instanceof PhysicalOp ) {
                return equals( ((PhysicalOp)o).getID() );
            }
            // strings compare against the canonical form; anything else is false
            return sid.equals(o);
        }

        @Override
        public String toString() { return sid; }

        @Override
        public int hashCode() { return sid.hashCode(); }

        @Override
        public int compareTo(ID o) {
            int rc = type.compareTo(o.type);
            if ( rc != 0 ) return rc;
            // compare level next: higher level should go first
            rc = Integer.compare(o.lv, lv);
            if ( rc != 0 ) return rc;
            // overflow-safe replacement for the old (int)(id - o.id)
            return Long.compare(id, o.id);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            // fields are final; deserialization must go through createID()
            throw new UnsupportedOperationException("Use createID() instead");
        }

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeInt(type.ordinal());
            out.writeInt(lv);
            out.writeLong(id);
        }

        /** Deserialize an ID previously written by {@link #write}. */
        public static ID createID(DataInput in) throws IOException {
            LogicalOp.Type t = LogicalOp.Type.values()[in.readInt()];
            int lv = in.readInt();
            long id = in.readLong();
            return new ID(t,lv,id);
        }
    }

    protected LogicalOp  lop;
    protected Configuration extraConf; // extra configuration instance used by optimizer. later merged with logical op
    protected ID         id;
    protected Partition  bound;  // associated boundary
    protected EaggPlan   plan;   // plan it belongs to
    protected List<Path> input;
    protected Path       output;    // output path
    protected Job        job;       // associated MR job; may stay null (see createJob)
    protected volatile Status status;
    protected int        retry;     // retry count

    protected long scheduledAt;   // wall-clock time (ms) when scheduled
    protected long completedAt;   // wall-clock time (ms) when completed

    protected PhysicalOp() {
        this.input = new ArrayList<Path>();
        this.status = Status.WAITING;
        // empty configuration: do not load default resources
        this.extraConf = new Configuration(false);
    }

    /**
     * Bind this operator to a plan, a logical operator and a partition
     * context. Must be called before any scheduling/setup method.
     */
    public void initialize(EaggPlan plan,LogicalOp lo,Partition context) {
        this.lop = lo;
        this.bound = context;
        this.plan = plan;
        this.id = new ID(this);
        setConf(lo.getConfiguration());
    }

    /** @return the job configuration once a job exists, else the logical op's. */
    @Override
    public Configuration getConf() {
        return ( job == null ) ? lop.getConfiguration() : job.getConfiguration();
    }

    /**
     * used by an optimizer or planner to pass extra information
     * @return extra configuration, merged into the job configuration at setup
     */
    public Configuration getExtraConf() {
        return extraConf;
    }

    /** No-op by default; subclasses may override to consume the configuration. */
    @Override
    public void setConf(Configuration conf) {}

    public final LogicalOp.Type getType() { return lop.getType(); }
    public final Partition getBound()  { return bound;  }
    public final Status getStatus() { return status; }
    public final void   setStatus(Status s) { status = s; }
    public final boolean isReady() { return status == Status.READY; }
    public final ID     getID()     { return id; }
    public final JobID  getJobID()  {
        return ( job == null ) ? null : job.getJobID();
    }
    public final EaggPlan getPlan() { return plan; }
    public final int getRetryCount() { return retry; }
    public final void incrRetryCount() { ++retry; }

    public List<Path> getInputPaths() { return input; }
    public void addInputPath(Path in) { input.add(in); }

    /**
     * @return the sole input path
     * @throws IllegalStateException if there is not exactly one input
     */
    public Path getInputPath() {
        if ( input.size() != 1 )
            throw new IllegalStateException(String.format("expected single input file: %d present", input.size()));
        return input.get(0);
    }

    public Path getOutputPath() { return output; }
    public void setOutputPath(Path p) { output = p; }

    public final int  getNumOutputStreams() {
        return lop.getNumOutputStreams();
    }

    @Override
    public final int hashCode() { return id.hashCode(); }

    /**
     * Equality is based on the operator {@link ID}. Accepts either a bare ID
     * or another PhysicalOp, keeping equals consistent with {@link #hashCode}
     * and symmetric with {@link ID#equals}.
     */
    @Override
    public final boolean equals(Object o) {
        if ( o == this ) return true;
        if ( o instanceof ID )
            return id.equals(o);
        if ( o instanceof PhysicalOp ) // fixed: ops sharing an ID are equal
            return id.equals( ((PhysicalOp)o).getID() );
        return false;
    }

    @Override
    public final int compareTo(PhysicalOp o) {
        return id.compareTo(o.getID());
    }

    public static final String PARTITION_SPEC_ATTR = "skewreduce.partition.spec";
    public static final String PARTITION_CLASS_ATTR = "skewreduce.partition.class";
    public static final String PARTITION_SPEC_FILE_ATTR = "skewreduce.partition.spec.file";

    /** Output path this op produces for the given downstream op; subclasses override. */
    protected Path getOutputByContext(PhysicalOp op) {
        throw new UnsupportedOperationException();
    }

    /** Register the given paths as the job's input; subclasses may override. */
    protected void setupInput(Job job,List<Path> input) throws IOException {
        FileInputFormat.setInputPaths(job,input.toArray(new Path[0]));
    }

    /**
     * Resolve input paths (from upstream operators if none were set
     * explicitly), pick a default output path under the plan's working
     * directory, and instantiate and configure the MapReduce job.
     */
    protected void setupInputOutput(EaggPlan plan) throws IOException {
        // input: if none given, collect the outputs of all upstream operators
        if ( input.isEmpty() ) {
            for ( PhysicalOp op : plan.getUpStreamOps(this) ) {
                Path in = op.getOutputByContext(this);
                if ( LOG.isDebugEnabled() ) LOG.debug("Adding input = "+in);
                input.add(in);
            }
        }

        // output: default to <workdir>/<operator id>
        if ( output == null ) {
            output = new Path( plan.getWorkDir(), getID().toString() );
        }
        if ( LOG.isDebugEnabled() ) {
            LOG.debug("Setting output = "+output);
        }

        job = createJob(lop.getConfiguration());
        if ( job == null ) {
            LOG.info("{}: Do not instantiate a MapReduce job",getID().toString());
        } else {
            Configuration jobConf = job.getConfiguration();

            // copy all extra configurations supplied by the optimizer/planner
            for ( Map.Entry<String, String> e : extraConf ) {
                jobConf.set(e.getKey(), e.getValue());
            }

            // setup job end notification back to the scheduler's HTTP endpoint
            jobConf.set("job.end.notification.url",String.format("%s/e/%s?$jobStatus",jobConf.get(EaggPlan.EAGG_HTTP_RPC_URI_ATTR),getID()));

            job.setJobName(getID().toString());

            if ( LOG.isDebugEnabled() ) {
                LOG.debug("Initializing job = "+job.getJobName());
            }

            setupInput(job,input);

            FileOutputFormat.setOutputPath(job,output);
        }
    }

    public void setup() throws IOException {
        setup(plan);
    }

    /** Prepare this operator for execution within the given plan. */
    public void setup(EaggPlan plan) throws IOException {
        setupInputOutput(plan);

        // createJob() may legitimately return null (no MR job needed);
        // guard against the NPE the old code would have thrown here
        if ( job != null ) {
            Configuration jobConf = job.getConfiguration();
            jobConf.set(PARTITION_SPEC_ATTR,bound.toSpec());
        }

        // FIXME: setup cardinality information
    }

    /**
     * Create the MapReduce job implementing this operator.
     * @return the job, or null when no MapReduce job is required
     */
    protected abstract Job createJob(Configuration conf) throws IOException;

    public boolean isUpStreamOf(PhysicalOp op) {
        return op.getBound().isAncestorOf(bound);
    }
    public boolean isDownStreamOf(PhysicalOp op) {
        return bound.isAncestorOf(op.getBound());
    }

    /*
     * operator control API
     */
    protected void ensureState(Status desired)
    throws IllegalStateException {
        if ( status != desired )
            throw new IllegalStateException(String.format("State %s is desired but we are in %s",desired.name(),status.name()));
    }

    /** Best-effort kill of the running job; the op is marked FAILED regardless. */
    public void kill() {
        ensureState(Status.RUNNING);
        try {
            job.killJob();
        } catch ( IOException ignored ) {
            // best effort: the job may already be gone; was silently swallowed before
            LOG.warn("Failed to kill job "+getID(),ignored);
        }
        setStatus(Status.FAILED);
    }

    public boolean isSuccessful() {
        if ( status == Status.RUNNING ) {
            isComplete();   // will cache result.
        }
        return status == Status.COMPLETE;
    }

    public void setComplete() { status = Status.COMPLETE; completedAt = System.currentTimeMillis(); }
    public void setFailed() { status = Status.FAILED; }

    /**
     * Poll the MR job if we are RUNNING and cache the terminal status.
     * @return true once the operator is in a terminal (or READY) state
     */
    public boolean isComplete() {
        if ( status == Status.RUNNING ) {
            try {
                if ( job.isComplete() ) {
                    status = job.isSuccessful() ? Status.COMPLETE : Status.FAILED;
                }
            } catch ( IOException ex ) {
                LOG.warn("Failed to query status of job "+getID(),ex);
            }
        }
        return ( status != Status.RUNNING && status != Status.WAITING );
    }

    /** Hooks for subclasses; default implementations do nothing. */
    public void onSuccess() throws Exception {}
    public void onFail() throws Exception {}
    public void cleanup() {}

    public long getScheduledAt() { return scheduledAt; }
    public long getCompletedAt() { return completedAt; }
    /** @return elapsed wall-clock time (ms) as observed by this client. */
    public long getRuntimeAtClient() { return completedAt - scheduledAt; }
}
