package skewreduce.framework;

import java.io.File;
import java.lang.reflect.*;
import java.net.URL;
import java.util.Properties;

import org.apache.log4j.PropertyConfigurator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.ReflectionUtils;

/**
 * fake runner for local debugging
 */
public class FakeRunner {

    /**
     * Status reporter used when running outside a real task tracker:
     * counters are accumulated locally, progress and status updates are
     * discarded.
     */
    static class BogusStatusReporter extends StatusReporter {
        private final Counters counters = new Counters();

        @Override
        public Counter getCounter(Enum<?> key) {
            return counters.findCounter(key);
        }

        @Override
        public Counter getCounter(String group, String name) {
            return counters.findCounter(group, name);
        }

        @Override
        public void progress() {
            // no-op: there is no task tracker to report progress to
        }

        @Override
        public void setStatus(String status) {
            // no-op: there is no task tracker to report status to
        }
    }

    /** Default log4j configuration file name looked up in the working directory. */
    static final String LOG4J_PROP = "log4j.properties";
    /** Configuration attribute naming an explicit log4j configuration file. */
    static final String LOG_CONFIG_ATTR = "skewreduce.log4j";

    /**
     * Configures log4j, trying in order: the file named by the
     * {@code skewreduce.log4j} configuration attribute, a
     * {@code log4j.properties} file in the current directory, the bundled
     * {@code /skewreduce/log4j.properties} classpath resource, and finally a
     * hard-coded stderr console appender.
     *
     * @param conf job configuration, consulted for {@code skewreduce.log4j}
     */
    private static void configureLog(Configuration conf) {
        String log4j = conf.get(LOG_CONFIG_ATTR);
        // explicit file from the configuration, else log4j.properties in CWD
        File f = (log4j == null) ? new File(LOG4J_PROP) : new File(log4j);

        if (f.exists()) {
            System.err.println("Setting up logging from " + f.getPath());
            PropertyConfigurator.configure(f.getPath());
            return;
        }

        // FIX: was EaggDriver.class — use this class for the classpath lookup
        // so this runner does not depend on an unrelated driver class.
        URL url = FakeRunner.class.getResource("/skewreduce/" + LOG4J_PROP);
        if (url != null) {
            System.err.println("Setting up logging from default properties");
            PropertyConfigurator.configure(url);
            return;
        }

        System.err.println("Setting up basic logging");

        // last resort: a plain console appender writing to stderr
        Properties props = new Properties();
        props.setProperty("log4j.rootLogger", "INFO, CONSOLE");
        props.setProperty("log4j.appender.CONSOLE", "org.apache.log4j.ConsoleAppender");
        props.setProperty("log4j.appender.CONSOLE.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.CONSOLE.layout.ConversionPattern", "%d [%t] %-5p %c - %m%n");
        props.setProperty("log4j.appender.CONSOLE.target", "System.err");

        PropertyConfigurator.configure(props);
    }

    /**
     * Runs the map phase of the job produced by the given task class entirely
     * in-process against a local input file, writing output to a local
     * directory (which is deleted first).
     * <p>
     * Usage: {@code FakeRunner [generic options] -taskClass CLASS -input FILE [-output DIR]}
     * <p>
     * The task class must expose a
     * {@code public static Job getJobInstance(Configuration)} factory method.
     *
     * @param args generic Hadoop options followed by the flags above
     * @throws Exception on any reflection, I/O, or map-task failure
     */
    @SuppressWarnings("unchecked")
    public static <INKEY,INVALUE,OUTKEY,OUTVALUE> void main(String[] args) throws Exception {
        GenericOptionsParser options = new GenericOptionsParser(new Configuration(), args);
        Configuration conf = options.getConfiguration();

        String[] rest = options.getRemainingArgs();
        String inputStr = null;
        String outputStr = "./output-tmp";
        Class<?> taskClass = null;

        for (int i = 0; i < rest.length; ++i) {
            if (rest[i].equals("-input")) {
                inputStr = rest[++i];
            } else if (rest[i].equals("-output")) {
                outputStr = rest[++i];
            } else if (rest[i].equals("-taskClass")) {
                taskClass = conf.getClassByName(rest[++i]);
            }
        }

        // FIX: fail fast with a usage message instead of an opaque NPE later
        if (taskClass == null || inputStr == null) {
            System.err.println("usage: FakeRunner [generic options] -taskClass <class> -input <file> [-output <dir>]");
            System.exit(1);
        }

        // force the local filesystem and the requested output directory
        conf.set("fs.default.name", "file:///");
        conf.set("mapred.output.dir", outputStr);

        configureLog(conf);

        // obtain the Job via the task class' static factory method
        Method method = taskClass.getMethod("getJobInstance", Configuration.class);
        Job job = (Job) method.invoke(null, conf);
        Configuration jobConf = job.getConfiguration();

        TaskAttemptID attemptID = new TaskAttemptID("local", 0, true, 0, 0);
        TaskAttemptContext attemptContext = new TaskAttemptContext(jobConf, attemptID);

        Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE> mapper =
            (Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE>) ReflectionUtils.newInstance(
                attemptContext.getMapperClass(), jobConf);

        // single split covering the entire local input file
        Path inputPath = new Path(inputStr);

        FileSystem localFs = FileSystem.getLocal(jobConf);
        localFs.delete(new Path(outputStr), true); // start from a clean output dir

        FileStatus inputStat = localFs.getFileStatus(inputPath);
        FileSplit split = new FileSplit(inputPath, 0, inputStat.getLen(), null);

        InputFormat inputFormat =
            ReflectionUtils.newInstance(attemptContext.getInputFormatClass(), jobConf);
        OutputFormat outputFormat =
            ReflectionUtils.newInstance(attemptContext.getOutputFormatClass(), jobConf);
        OutputCommitter committer = outputFormat.getOutputCommitter(attemptContext);

        RecordReader<INKEY,INVALUE> input = inputFormat.createRecordReader(split, attemptContext);
        RecordWriter<OUTKEY,OUTVALUE> output = outputFormat.getRecordWriter(attemptContext);

        // Mapper.Context has no public factory in this Hadoop version; build
        // it reflectively, the same way the local job runner does.
        Constructor<Mapper.Context> contextConstructor =
            Mapper.Context.class.getConstructor(
                new Class[]{Mapper.class,
                            Configuration.class,
                            TaskAttemptID.class,
                            RecordReader.class,
                            RecordWriter.class,
                            OutputCommitter.class,
                            StatusReporter.class,
                            InputSplit.class});

        Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE>.Context mapperContext =
            contextConstructor.newInstance(mapper,
                jobConf,
                attemptID,
                input,
                output,
                committer,
                new BogusStatusReporter(),
                split);

        input.initialize(split, mapperContext);
        mapper.run(mapperContext);
        input.close();
        // FIX: close (and thereby flush) the record writer BEFORE committing
        // the task; committing first could promote an incompletely flushed
        // output file. This matches the ordering used by Hadoop's own runners.
        output.close(mapperContext);
        if (committer.needsTaskCommit(attemptContext)) {
            committer.commitTask(attemptContext);
        }
    }
}
