package mapred;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
import java.util.concurrent.Semaphore;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;

/**
 * Base class for a runnable Hadoop MapReduce job (old {@code mapred} API).
 * Subclasses supply the map/combine/reduce/input/output configuration via the
 * abstract {@code get*Config} hooks plus {@link #setUp} / {@link #tearDown}.
 *
 * <p>Execution model: {@link #submit()} starts this thread and then blocks on
 * the shared semaphore until {@link #run()} finishes, giving synchronous
 * submit-and-wait semantics on top of {@link Thread}.
 */
public abstract class Job extends Thread {

	// Gates job execution; held for the whole run so submit() blocks until done.
	private Semaphore sem;

	// Job name with whitespace collapsed to underscores (see setJobName).
	private String jobName;

	// String settings parsed from "key=value" pairs; keys are lower-cased.
	private Map<String, String> config;

	// Hadoop job configuration; created in run(), so it is null before start.
	private JobConf conf;

	/** Creates a job with a private single-permit semaphore. */
	public Job(String jobName, String... settings) {
		this(jobName, new Semaphore(1), settings);
	}

	/**
	 * @param jobName  display name; whitespace is replaced with '_'
	 * @param sem      semaphore coordinating {@link #submit()} and {@link #run()}
	 * @param settings "key=value" options; a bare value (no '=') is stored as a
	 *                 positional argument under the key {@code args[N]}
	 */
	public Job(String jobName, Semaphore sem, String... settings) {
		super();
		this.setJobName(jobName);
		this.sem = sem;
		config = new HashMap<String, String>();

		// Defaults; any of these may be overridden by the settings below.
		this.setConfig("-timeout", 600000);
		this.setConfig("-vmargs", "-Xmx2g");
		this.setConfig("-overwrite", "no");

		int argIndex = 0;
		for (String setting : settings) {
			int pos = setting.indexOf("=");
			if (pos < 0) {
				// No '=': treat as the next positional argument.
				this.setConfig("args[" + argIndex + "]", setting);
				argIndex++;
			} else {
				this.setConfig(setting.substring(0, pos), setting
						.substring(pos + 1));
			}
		}
	}

	/**
	 * Configures and runs the MapReduce job, holding the semaphore for the
	 * duration so that {@link #submit()} blocks until completion.
	 *
	 * @throws RuntimeException wrapping any checked exception from job setup
	 *                          or execution
	 */
	public void run() {
		try {
			System.out.println("Semaphore acquire, sem="
					+ sem.availablePermits());
			sem.acquire();
			System.out.println("Semaphore acquired, sem="
					+ sem.availablePermits());
			try {
				conf = new JobConf(this.getClass());
				conf.setJobName(jobName);
				conf.set("mapred.task.timeout", config.get("-timeout"));
				conf.set("mapred.child.java.opts", config.get("-vmargs"));
				FileSystem fs = FileSystem.get(conf);

				setOutput(conf, fs);
				setReduce(conf);
				setCombine(conf);
				setMap(conf);
				setInput(conf, fs);

				setUp(conf, fs);
				System.out.println(jobName);
				System.out.println(config);
				RunningJob job = JobClient.runJob(conf);
				tearDown(conf, fs, job);
			} finally {
				// BUGFIX: release even when the job fails; previously a
				// failed run leaked the permit and submit() deadlocked.
				sem.release();
				System.out.println("Semaphore released, sem="
						+ sem.availablePermits());
			}
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Applies output format/key/value/path/compression settings. The path from
	 * the subclass's OutputConfig may be overridden by {@code -outputpath};
	 * an existing path is deleted if {@code -overwrite} starts with 'y' or the
	 * user confirms interactively, otherwise the JVM exits.
	 */
	private void setOutput(JobConf conf, FileSystem fs) throws IOException {
		OutputConfig oc = this.getOutputConfig(fs);
		conf.setOutputFormat(oc.getOutputFormat());
		conf.setOutputKeyClass(oc.getOutputKeyClass());
		conf.setOutputValueClass(oc.getOutputValueClass());
		Path outputPath = new Path(oc.getOutputPath());
		if (config.containsKey("-outputpath")) {
			outputPath = new Path(config.get("-outputpath"));
		}
		FileOutputFormat.setOutputPath(conf, outputPath);
		System.out.println("Output Path: " + outputPath);
		if (fs.exists(outputPath)) {
			if (this.getConfig("-overwrite").toLowerCase().startsWith("y")) {
				fs.delete(outputPath, true);
			} else {
				System.out.println("Output Path " + outputPath.toString()
						+ " already exists, overwrite it?(y/N)");
				// Scanner is deliberately not closed: closing it would close
				// System.in for the rest of the process.
				Scanner sc = new Scanner(System.in);
				if (sc.next().trim().toLowerCase().startsWith("y")) {
					fs.delete(outputPath, true);
				} else {
					System.exit(0);
				}
			}
		}
		if (config.containsKey("-outputcompress")) {
			String outputCompress = config.get("-outputcompress");
			if (!"null".equalsIgnoreCase(outputCompress)) {
				// BUGFIX: was conf.setCompressMapOutput(true), which toggled
				// *map-output* compression and never enabled job-output
				// compression, so the codec chosen below had no effect.
				FileOutputFormat.setCompressOutput(conf, true);
				if ("default".equalsIgnoreCase(outputCompress)) {
					FileOutputFormat.setOutputCompressorClass(conf,
							DefaultCodec.class);
				} else if ("gzip".equalsIgnoreCase(outputCompress)) {
					FileOutputFormat.setOutputCompressorClass(conf,
							GzipCodec.class);
				}
			}
		} else {
			if (oc.isCompressOutput()) {
				FileOutputFormat.setCompressOutput(conf, true);
				FileOutputFormat.setOutputCompressorClass(conf, oc
						.getOutputCompressorClass());
			}
		}
	}

	/**
	 * Applies the reducer class and task count; {@code -reduce} overrides the
	 * subclass-provided count.
	 */
	private void setReduce(JobConf conf) {
		ReduceConfig rc = this.getReduceConfig();
		conf.setReducerClass(rc.getReducerClass());
		if (config.containsKey("-reduce")) {
			conf.setNumReduceTasks(Integer.parseInt(config.get("-reduce")));
		} else {
			conf.setNumReduceTasks(rc.getNumReduceTasks());
		}
		conf.setMaxReduceAttempts(10);
	}

	/** Applies the combiner class, if the subclass provides one. */
	private void setCombine(JobConf conf) {
		CombineConfig cc = this.getCombineConfig();
		if (cc.getCombinerClass() != null) {
			conf.setCombinerClass(cc.getCombinerClass());
		}
	}

	/**
	 * Applies mapper class, task count, map output key/value classes, and
	 * optional map-output compression. {@code -map} overrides the task count;
	 * {@code -mapcompress} (default/gzip/null) overrides the subclass setting.
	 */
	private void setMap(JobConf conf) {
		MapConfig mc = this.getMapConfig();
		conf.setMapperClass(mc.getMapperClass());
		if (config.containsKey("-map")) {
			conf.setNumMapTasks(Integer.parseInt(config.get("-map")));
		} else {
			conf.setNumMapTasks(mc.getNumMapTasks());
		}
		conf.setMapOutputKeyClass(mc.getOutputKeyClass());
		conf.setMapOutputValueClass(mc.getOutputValueClass());
		if (config.containsKey("-mapcompress")) {
			String mapCompress = config.get("-mapcompress");
			if (!"null".equalsIgnoreCase(mapCompress)) {
				conf.setCompressMapOutput(true);
				if ("default".equalsIgnoreCase(mapCompress)) {
					conf.setMapOutputCompressorClass(DefaultCodec.class);
				} else if ("gzip".equalsIgnoreCase(mapCompress)) {
					conf.setMapOutputCompressorClass(GzipCodec.class);
				}
			}
		} else {
			if (mc.isCompressMapOutput()) {
				conf.setCompressMapOutput(true);
				conf.setMapOutputCompressorClass(mc.getOutputCompressorClass());
			}
		}

		conf.setMaxMapAttempts(10);
		conf.setMaxMapTaskFailuresPercent(1);
	}

	/**
	 * Applies input format and input paths; {@code -inputpaths} (comma
	 * separated) overrides the subclass-provided paths. Missing paths are
	 * skipped with a warning rather than failing the job.
	 */
	private void setInput(JobConf conf, FileSystem fs) throws IOException {
		InputConfig ic = this.getInputConfig(fs);
		conf.setInputFormat(ic.getInputFormat());
		String[] paths = ic.getInputPaths();
		if (config.containsKey("-inputpaths")) {
			paths = config.get("-inputpaths").split(",");
		}
		for (String path : paths) {
			Path inputpath = new Path(path);
			if (fs.exists(inputpath)) {
				FileInputFormat.addInputPath(conf, inputpath);
				System.out.println("Input Path: " + inputpath);
			} else {
				System.out.println("Cannot find path " + inputpath);
			}
		}
	}

	/**
	 * Starts the job thread and blocks until it completes. The sleep gives
	 * {@link #run()} time to acquire the semaphore first, so the subsequent
	 * acquire here only succeeds once run() has released it.
	 */
	public void submit() throws Exception {
		this.start();
		Thread.sleep(1000);
		sem.acquire();
	}

	/** Hook invoked after the job finishes (e.g. to read counters or clean up). */
	protected abstract void tearDown(JobConf conf, FileSystem fs, RunningJob job)
			throws IOException;

	/** Hook invoked just before job submission for extra configuration. */
	protected abstract void setUp(JobConf conf, FileSystem fs)
			throws IOException;

	protected abstract OutputConfig getOutputConfig(FileSystem fs)
			throws IOException;

	protected abstract ReduceConfig getReduceConfig();

	protected abstract CombineConfig getCombineConfig();

	protected abstract MapConfig getMapConfig();

	protected abstract InputConfig getInputConfig(FileSystem fs)
			throws IOException;

	/** Looks up a setting; keys are case-insensitive (stored lower-cased). */
	protected String getConfig(String key) {
		return config.get(key.toLowerCase());
	}

	/** Stores a setting under the lower-cased key; value via toString(). */
	protected void setConfig(String key, Object value) {
		config.put(key.toLowerCase(), value.toString());
	}

	/** Sets the job name, replacing runs of whitespace with underscores. */
	public void setJobName(String name) {
		this.jobName = name.replaceAll("\\s+", "_");
	}

	/**
	 * Sets a raw property on the JobConf. Only valid after {@link #run()} has
	 * created the conf. NOTE(review): the key is lower-cased, which would
	 * mangle case-sensitive Hadoop property names — kept for compatibility,
	 * but confirm callers rely on this.
	 */
	public void setJobConfConfig(String key, Object value) {
		conf.set(key.toLowerCase(), value.toString());
	}

	/** Returns the value of a single named counter from the finished job. */
	public long getCounter(RunningJob job, String group, String name)
			throws IOException {
		return job.getCounters().findCounter(group, name).getCounter();
	}

	/** Returns the sum of all counters in the given counter group. */
	public long getCounterSumByGroup(RunningJob job, String group)
			throws IOException {
		Counters.Group grp = job.getCounters().getGroup(group);
		long ret = 0;
		for (Counters.Counter cntr : grp) {
			ret += cntr.getCounter();
		}
		return ret;
	}

}
