package tul.cot.job;

import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

import tul.cot.MigStats;
import tul.cot.MigStats.Change;
import tul.cot.MigrationTree;
import tul.cot.exception.MissingPropertyException;
import tul.cot.exception.ParseTreeException;
import tul.cot.io.Header;
import tul.cot.io.PlotPoint;
import tul.cot.job.MyCounters.USER_COUNTERS;
import tul.cot.lib.mapper.MigMapper;
import tul.cot.lib.reducer.MigReducer;
import tul.cot.local.GetResults;
import tul.cot.util.Histogram;
import tul.cot.util.HistogramAggregator;
import tul.cot.util.MigAggregator;
import tul.cot.util.Pairs;
import tul.cot.util.StatisticsUtils;
import tul.cot.util.TreeStats;


public class Migrations extends Configured implements Tool, MigProperties {

	private static final Logger LOG = Logger.getLogger(Migrations.class);
	private static Level LOG_LEVEL = Level.INFO;
	static {
//		LOG_LEVEL = Level.DEBUG;
		LOG.setLevel(LOG_LEVEL);
	}
	
	public static final String CSV_SEP = ";";
	public static final String PAIR_SEP = "-->";
	public static final String HIST_PAIR_SEP = "<->>";
	
	private static final float DEFAULT_HPD_LEVEL = 0.95f;
	
	// args
	private int numTrees;
	private double startTime;
	private int numSeg;
	private File pairsFile;
	private Path inDir;
	private Path outDir;
	private File localDir;
	private boolean getToLocal = false;
	private boolean getOnly = false;
	
	private Pairs pairs;

	/**
	 * command line args
	 */
	interface MainArgs {
		String ARG_NUM_TREES	= "numTrees";
		String DESC_NUM_TREES	= "Number of trees";
		
		String ARG_START_TIME	= "startTime";
		String DESC_START_TIME	= "Start time of the first segment";
		
		String ARG_NUM_SEG		= "numSeg";
		String DESC_NUM_SEG		= "Number of segments";
		
		String ARG_PAIRS		= "pairs";
		String DESC_PAIRS		= "Local path to csv file with pairs";
		
		String ARG_IN			= "in";
		String DESC_IN			= "HDFS path to the input dir";
		
		String ARG_OUT			= "out";
		String DESC_OUT			= "HDFS path to the output dir";
		
		String ARG_OUT_LOCAL	= "outLocal";
		String DESC_OUT_LOCAL	= "Local path to the output dir";
		
		String ARG_GET_ONLY		= "getOnly";
		String DESC_GET_ONLY	= "Get results from out HDFS location to local fs";
	}
	
	// Parsed command-line option definitions, shared by loadArgs().
	private static Options	options;
	// adding options
	static {
		options = new Options();
		
		// every option except -getOnly takes a value (second ctor arg = hasArg)
		Option optNumTrees		= new Option(MainArgs.ARG_NUM_TREES,	true, MainArgs.DESC_NUM_TREES);
		Option optStartTime		= new Option(MainArgs.ARG_START_TIME,	true, MainArgs.DESC_START_TIME);
		Option optNumSeg		= new Option(MainArgs.ARG_NUM_SEG,		true, MainArgs.DESC_NUM_SEG);
		Option optPairs			= new Option(MainArgs.ARG_PAIRS,		true, MainArgs.DESC_PAIRS);
		Option optInput			= new Option(MainArgs.ARG_IN,			true, MainArgs.DESC_IN);
		Option optOutput		= new Option(MainArgs.ARG_OUT,			true, MainArgs.DESC_OUT);
		Option optLocalOut		= new Option(MainArgs.ARG_OUT_LOCAL,	true, MainArgs.DESC_OUT_LOCAL);
		Option optGetOnly		= new Option(MainArgs.ARG_GET_ONLY,		false, MainArgs.DESC_GET_ONLY);
		
		// -outLocal and -getOnly are optional; everything else is mandatory
		optNumTrees.setRequired(true);
		optStartTime.setRequired(true);
		optNumSeg.setRequired(true);
		optPairs.setRequired(true);
		optInput.setRequired(true);
		optOutput.setRequired(true);
		optLocalOut.setRequired(false);
		optGetOnly.setRequired(false);
		
		options.addOption(optNumTrees);
		options.addOption(optStartTime);
		options.addOption(optNumSeg);
		options.addOption(optPairs);
		options.addOption(optInput);
		options.addOption(optOutput);
		options.addOption(optLocalOut);
		options.addOption(optGetOnly);
	}
	
	/**
	 * The main {@link Mapper} which processes one tree per map and emits
	 * global-segment and migrations-segment statistics.</br>
	 * Output: &lt;Header(Text, int), PlotPoint(int, double)&gt;
	 */
	public static class TaskMapper extends Mapper<LongWritable, Text, Header, PlotPoint>
		implements MyLabels, MigProperties{
		
		protected static Counter BAD_TREES_CNT;
		protected static Counter NOT_ULTRAMETRIC_CNT;
		
		protected static double	START_TIME	= -1;
		protected static int	NUM_SEG		= 0;
		protected static int	FIRST_SEG	= 1;
		protected static String	PAIRS_STR	= null;
		protected static int 	PERMILS		= 1;
		protected static double	ULTRAMETRIC_EPS	= 10E-9;
		
		protected static Header		outHeader	= new Header();
		protected static PlotPoint	outPP		= new PlotPoint();
		
		protected Logger LOG = Logger.getLogger(getClass());
		{LOG.setLevel(LOG_LEVEL);}
		protected Set<Change> allChanges = new TreeSet<Change>();
		
		/**
		 * Reads the mapper parameters from the job configuration, wires up the
		 * user counters, validates the parameters and expands the pairs string.
		 */
		@Override
		protected void setup(Context context) {
			
			Configuration conf = context.getConfiguration();
			
			BAD_TREES_CNT = context.getCounter(USER_COUNTERS.BAD_TREES);
			NOT_ULTRAMETRIC_CNT = context.getCounter(USER_COUNTERS.NOT_ULTRAMETRIC_TREES);
			
			START_TIME	= conf.getFloat(CONF_START_TIME, (float) START_TIME);
			NUM_SEG		= conf.getInt(CONF_NUM_SEG, NUM_SEG);
			FIRST_SEG	= conf.getInt(CONF_FIRST_SEG, FIRST_SEG);
			PAIRS_STR	= conf.get(CONF_PAIRS);
			
			PERMILS		= conf.getBoolean(CONF_PERMILS, false) ? 1000 : 1;
			// BUG FIX: ULTRAMETRIC_EPS was previously read with
			// conf.getFloat(CONF_PERMILS, ...) — a copy-paste error. CONF_PERMILS
			// holds a boolean string ("true"/"false", always set by the driver's
			// setupConf()), so Configuration.getFloat() would throw a
			// NumberFormatException here. Keep the field's default value instead.
			// TODO: read ULTRAMETRIC_EPS from a dedicated property key once one
			// is defined in MigProperties.
			 
			validateConf();
			initPairs();
		}
		
		/**
		 * Expands the serialized pairs string into the {@link Change} set,
		 * registering both directions (from-&gt;to and to-&gt;from) of every pair.
		 */
		private void initPairs() {
			String[] pairStrings = PAIRS_STR.split(InitStats.PAIRS_SEP);
			
			for (String pair: pairStrings) {
				String[] endpoints = pair.split(PAIR_SEP);
				
				allChanges.add(new Change(endpoints[0], endpoints[1]));
				allChanges.add(new Change(endpoints[1], endpoints[0]));
			}
		}

		/**
		 * Verifies that the mandatory mapper parameters were supplied via the
		 * job configuration.
		 *
		 * @throws IllegalArgumentException when a required parameter is missing
		 *         or non-positive
		 */
		private void validateConf() {
			if (START_TIME <= 0)
				throw new IllegalArgumentException("START_TIME <= 0");
			
			if (NUM_SEG <= 0)
				throw new IllegalArgumentException("NUM_SEG <= 0");
			
			if (PAIRS_STR == null)
				throw new IllegalArgumentException("PAIRS_STR == null");
		}

		/**
		 * Processes one tree per call: parses the Newick-like input line into a
		 * {@link MigrationTree}, computes per-segment statistics and emits them.
		 * Unparsable trees are counted via the BAD_TREES counter and skipped.
		 */
		@Override
		protected void map(LongWritable key, Text treeStr, Context context)
			throws IOException, InterruptedException {
			
			LOG.debug(">map");
			
			MigrationTree tree = null;
			
			try {
				tree = new MigrationTree(treeStr.toString());
				tree.setTimes();
				
				LOG.debug(tree.toString());
			} catch (ParseTreeException e) {
				// count and drop the bad tree; the driver reconciles this counter
				// against -numTrees after the job finishes
				BAD_TREES_CNT.increment(1);
				return;
			}
			
			// emit stats for all input pairs (incl dummy values)
			TreeStats treeStats = new TreeStats(tree, START_TIME, NUM_SEG);
			for (MigStats stats: treeStats) {
				// segments before FIRST_SEG are intentionally skipped
				if (stats.segNum >= FIRST_SEG) {
					emit_Stats(context, stats);
				}
			}
			
			LOG.debug("<map");
		}

		/**
		 * Emits all statistics for one segment: first the per-pair migration
		 * frequencies, then the global segment statistics.
		 *
		 * @param stats statistics of a single segment of the current tree
		 */
		protected void emit_Stats(Context context, MigStats stats) throws IOException, InterruptedException {
			
			LOG.debug(stats.toString());
			
			emit_Migrations(context, stats);
			emit_GlobalStats(context, stats);
		}
		
		/**
		 * Emits one migrations-per-segment record for every direction of every
		 * configured pair. Directions with no observed change in this segment
		 * are emitted with {@link MigAggregator#DUMMY_VALUE} so each (pair,
		 * segment) combination always has a value downstream.
		 */
		private void emit_Migrations(Context context, MigStats stats) throws IOException, InterruptedException {
			
			final int total = stats.getTotalDispersals();
			
			for (Change change: allChanges) {
				int count = stats.getChangeCounter(change);
				
				double frequency;
				if (count == 0) {
					frequency = MigAggregator.DUMMY_VALUE;
				} else if (total == 0) {
					frequency = 0;
				} else {
					// count*PERMILS so the ratio is reported in permils when enabled
					frequency = ((double) (count * PERMILS)) / ((double) total);
				}
				
				outHeader.set(change.from + PAIR_SEP + change.to, stats.segNum);
				outPP.set(outHeader, frequency);
				
				LOG.debug(String.format("context.write(%s, %s);", outHeader, outPP));
				context.write(outHeader, outPP);
			}
		}
		
		/**
		 * Emits the per-segment global statistics:
		 * REALISED, POTENTIAL, REALISED_TO_POTENTIAL and LINEAGES.
		 */
		private void emit_GlobalStats(Context context, MigStats stats) throws IOException, InterruptedException {
			
			int realised = stats.getRealisedDispersals();
			int potential = stats.getTotalDispersals();
			//TODO [?] potential == 0 (NaN)
			double r2p = (potential == 0) ? 0 : ((double) realised) / ((double) potential);
			
			writeGlobalStat(context, REALISED, stats.segNum, realised);
			writeGlobalStat(context, POTENTIAL, stats.segNum, potential);
			writeGlobalStat(context, REALISED_TO_POTENTIAL, stats.segNum, r2p);
			writeGlobalStat(context, LINEAGES, stats.segNum, stats.getOutLineages());
		}
		
		/** Writes a single (label, segment) -&gt; value record via the shared output buffers. */
		private void writeGlobalStat(Context context, String label, int segNum, double value)
			throws IOException, InterruptedException {
			
			outHeader.set(label, segNum);
			outPP.set(outHeader, value);
			context.write(outHeader, outPP);
		}
		
		/**
		 * Emits the dispersal asymmetry index (DAI) for every unordered pair.
		 * Only the canonical direction (from &lt; to) produces output; the
		 * opposite direction's counter is folded into the same DAI value.
		 * The segment number is fixed to -1, which makes {@link HeaderGroupper}
		 * take its segment-comparison branch for these records.
		 */
		protected void emit_DAI(Context context, MigStats stats) throws IOException, InterruptedException {
			//emit output only for one direction (from < to)
			for (Change change: allChanges) {
				if (change.from.compareTo(change.to) < 0) {
					Change opposite = new Change(change.to, change.from);
					int chCnt = stats.getChangeCounter(change);
					int opCnt = stats.getChangeCounter(opposite);
					
					double DAI = StatisticsUtils.getDAI(chCnt, opCnt);
					
					String label = String.format("%s%s%s", change.from, PAIR_SEP, change.to);
					outHeader.set(label, -1);
					outPP.set(outHeader, DAI);

					LOG.debug(String.format("context.write(%s, %s);", outHeader, outPP));
					context.write(outHeader, outPP);
				}
			}
		}
	}
	
	/**
	 * The main {@link Reducer}. For each reduce group (single lablel) it aggregates
	 * values in every segment (median, hpd_min, hpd_max).</br>
	 * The final output has the following format: (chart_name);(value_type);(values).
	 */
	public static class TaskReducer extends Reducer<Header, PlotPoint, NullWritable, Text>
		implements MyLabels, MigProperties {
		
		protected static int NUM_TREES = 0;
		protected static final int NUM_BUCKETS = 20;
		protected static float HPD_CONF_LEVEL = 0.9f;
		
		protected static final NullWritable outKey = NullWritable.get();
		protected static Text outValue = new Text();
		
		protected Logger LOG = Logger.getLogger(getClass());
		{LOG.setLevel(LOG_LEVEL);}
		
		/**
		 * Reads the reducer parameters from the job configuration and validates
		 * them (NUM_TREES must be positive, HPD_CONF_LEVEL within [0,1]).
		 */
		@Override
		protected void setup(Context context) throws IOException, InterruptedException {
			NUM_TREES		= context.getConfiguration().getInt(CONF_NUM_TREES, 0);
			// CONSISTENCY FIX: fall back to the driver-wide DEFAULT_HPD_LEVEL
			// (0.95) instead of a hard-coded 0.9f that disagreed with
			// Migrations.setupConf(). The driver always sets CONF_HPD_LEVEL, so
			// this only matters when the reducer is run with a foreign driver.
			HPD_CONF_LEVEL	= context.getConfiguration().getFloat(CONF_HPD_LEVEL, DEFAULT_HPD_LEVEL);
			 
			 if (NUM_TREES <= 0)
				 throw new IllegalArgumentException("NUM_TREES <= 0");
			 
			 if (HPD_CONF_LEVEL < 0f)
				 throw new IllegalArgumentException("HPD_CONF_LEVEL < 0f");
			 if (HPD_CONF_LEVEL > 1f)
				 throw new IllegalArgumentException("HPD_CONF_LEVEL > 1f");
		}
		
		/**
		 * Aggregates all {@link PlotPoint}s of one reduce group (a single chart
		 * label) and emits three CSV lines: the per-segment median, HPD minimum
		 * and HPD maximum series.
		 */
		@Override
		protected void reduce(Header key, Iterable<PlotPoint> values, Context context) throws IOException ,InterruptedException {
			
			MigAggregator aggregator = new MigAggregator(NUM_TREES, HPD_CONF_LEVEL);
			aggregator.aggregate(values);
			
			List<Double> medianValues = aggregator.getMedianValues();
			List<Double> hpdMinValues = aggregator.getHpdMinValues();
			List<Double> hpdMaxValues = aggregator.getHpdMaxValues();

			emit_Values(context, key, MEDIAN, medianValues);
			emit_Values(context, key, HPD_MIN, hpdMinValues);
			emit_Values(context, key, HPD_MAX, hpdMaxValues);
		}

		/**
		 * Writes one CSV output line in the format
		 * (chart label);(value type);(v1);(v2);...
		 *
		 * @param label  value type tag (e.g. MEDIAN, HPD_MIN, HPD_MAX)
		 * @param values per-segment values, already in segment order
		 */
		protected void emit_Values(Context context, Header header, String label, List<Double> values) throws IOException, InterruptedException {
			StringBuilder line = new StringBuilder(header.getLabel());
			line.append(CSV_SEP).append(label);
			
			for (double value: values) {
				line.append(CSV_SEP).append(value);
			}
			
			outValue.set(line.toString());
			context.write(outKey, outValue);
		}
		
		/**
		 * Writes one histogram as a CSV line:
		 * HISTOGRAM;(histogram label);(count1);(count2);...
		 */
		protected void emit_Histogram(Context context, Histogram hist) throws IOException, InterruptedException {
			
			StringBuilder line = new StringBuilder(HISTOGRAM);
			line.append(CSV_SEP).append(hist.getLabel());
			
			// CURR: append counts (PREV: append density values)
			for (int count: hist.getCounts()) {
				line.append(CSV_SEP).append(count);
			}
			
			LOG.debug("Emit histogram:\n" + hist);
			
			outValue.set(line.toString());
			context.write(outKey, outValue);
		}
	}
	
	/**
	 * Grouping comparator for {@link Header} keys: all records sharing a label
	 * form a single reduce group. Records carrying a negative segment number
	 * are grouped by segment instead.
	 */
	public static class HeaderGroupper extends WritableComparator {

		protected HeaderGroupper() {
			// deserialize keys into Header instances (slower than raw-byte
			// comparison, but lets us reuse Header's comparison helpers)
			super(Header.class, true);
		}
		
		@Override
		public int compare(WritableComparable w1, WritableComparable w2) {
			Header first = (Header) w1;
			Header second = (Header) w2;
			
			boolean negativeSegment = first.getSegment() < 0 || second.getSegment() < 0;
			return negativeSegment
					? first.compareSegment(second)
					: first.compareLabel(second);
		}
		
	}
	
	
	/**
	 * Driver entry point: parses the command line, prepares and validates the
	 * configuration, submits the MapReduce job (unless -getOnly was given),
	 * reconciles the tree counters, and optionally downloads the results to
	 * the local filesystem.
	 *
	 * @return 0 on success; 1 when the job fails or the number of analyzed
	 *         trees differs from -numTrees
	 */
	@Override
	public int run(String[] args) throws Exception {
		
		loadArgs(args);
		validateArgs();
		init();
		
		warnUnexpectedConf();
		infoInitialConf();
		setupConf();
		validateConf();
		
		if (!getOnly) {
			
			printWarns();
			
			String jobName = String.format("%s: numTrees(%d) startTime(%s) numSeg(%d) pairsFile(%s) hpdLevel(%s) in(%s) out(%s)",
					getClass().getSimpleName(),
					numTrees, Double.toString(startTime), numSeg, pairsFile.getName(),
					Float.toString(getConf().getFloat(CONF_HPD_LEVEL, DEFAULT_HPD_LEVEL)),
					inDir.getName(), outDir.getName());
			
			Job job = new Job(getConf(), jobName);
			job.setJarByClass(getClass());
			
			FileInputFormat.addInputPath(job, inDir);
			FileOutputFormat.setOutputPath(job, outDir);
	
			job.setMapperClass(MigMapper.class);
			job.setMapOutputKeyClass(Header.class);
			job.setMapOutputValueClass(PlotPoint.class);
	
			job.setReducerClass(MigReducer.class);
			job.setOutputKeyClass(NullWritable.class);
			job.setOutputValueClass(Text.class);
			
			// group all segments of one label into a single reduce call
			job.setGroupingComparatorClass(HeaderGroupper.class);
			
			boolean success = job.waitForCompletion(true);
			
			if (!success)
				return 1;
			
			
			// validate counters: every input record must be either analyzed or
			// explicitly counted as a bad tree, otherwise the statistics are skewed
			Counters counters = job.getCounters();
			Counter MIR_CNT = counters.findCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS");
			Counter BT_CNT = counters.findCounter(USER_COUNTERS.BAD_TREES);
			
			long inputTrees = MIR_CNT.getValue();
			long badTrees = BT_CNT.getValue();
			long analyzedTrees = inputTrees - badTrees;
			
			LOG.info(String.format("Job analyzed %d trees", analyzedTrees));
			if (analyzedTrees != numTrees) {
				LOG.error(String.format("Job have to be run again with option -numTrees %d", analyzedTrees));
				return 1;
			}
		}
		
		LOG.info(String.format("HDFS out:\t%s", outDir.toString()));
		
		if (getToLocal) {
			// FIX: removed the unused local 'fsLocal' — it read the deprecated
			// "fs.default.name" property but its value was never used here
			GetResults getResults = new GetResults(
					getConf(),
					pairs, startTime, numSeg, TaskReducer.NUM_BUCKETS,
					new File(outDir.toString()), localDir);
			getResults.run();
			
			LOG.info(String.format("FS out:\t%s", localDir.getAbsolutePath()));
		}
		
		return 0;
	}
	
	/** Warns when the logger is running at a non-default (non-INFO) level. */
	private void printWarns() {
		boolean defaultLevel = LOG_LEVEL.equals(Level.INFO);
		if (!defaultLevel) {
			LOG.warn("LOG_LEVEL == " + LOG_LEVEL);
		}
	}

	/**
	 * Parses the command line arguments into the driver fields. On a parse
	 * failure it prints usage information and terminates the JVM with status 1.
	 *
	 * @param args raw command line arguments
	 * @throws IOException 
	 */
	private void loadArgs(String[] args) throws IOException {
		CommandLine cmd = null;
		
		try {
			cmd = new PosixParser().parse(options, args);
		} catch (ParseException e) {
			System.err.println(e.getMessage());
			
			new HelpFormatter().printHelp(getClass().getSimpleName(), options);
			
			System.exit(1);
		}
		
		numTrees	= Integer.parseInt(cmd.getOptionValue(MainArgs.ARG_NUM_TREES));
		startTime	= Double.parseDouble(cmd.getOptionValue(MainArgs.ARG_START_TIME));
		numSeg		= Integer.parseInt(cmd.getOptionValue(MainArgs.ARG_NUM_SEG));
		pairsFile	= new File (cmd.getOptionValue(MainArgs.ARG_PAIRS));
		inDir		= new Path(cmd.getOptionValue(MainArgs.ARG_IN));
		outDir		= new Path(cmd.getOptionValue(MainArgs.ARG_OUT));
		
		// -outLocal is optional; localDir stays null when it is absent
		getToLocal = cmd.hasOption(MainArgs.ARG_OUT_LOCAL);
		if (getToLocal) {
			localDir = new File (cmd.getOptionValue(MainArgs.ARG_OUT_LOCAL));
		}
		
		getOnly = cmd.hasOption(MainArgs.ARG_GET_ONLY);
	}
	
	/**
	 * Scans the current configuration for properties that are known to be
	 * unexpected and, if any are present, logs them all in a single warning.
	 */
	private void warnUnexpectedConf() {
		Set<String> unexpected = new HashSet<String>();
		for (String prop: UNEXPECTED_PROPERTIES) {
			unexpected.add(prop);
		}
		
		StringBuilder report = new StringBuilder();
		report.append("Unexpected configuration properties:\n");
		
		boolean found = false;
		for (Map.Entry<String, String> entry: getConf()) {
			if (unexpected.contains(entry.getKey())) {
				found = true;
				report.append(String.format("\t%s=%s\n", entry.getKey(), entry.getValue()));
			}
		}
		
		if (found)
			LOG.warn(report);
	}
	
	/**
	 * Logs the initial values of all recognized configuration properties.
	 */
	private void infoInitialConf() {
		Set<String> recognized = new HashSet<String>();
		for (String prop: ALL_PROPERTIES) {
			recognized.add(prop);
		}
		
		StringBuilder report = new StringBuilder();
		report.append("Initial configuration values:\n");
		
		for (Map.Entry<String, String> entry: getConf()) {
			if (recognized.contains(entry.getKey())) {
				report.append(String.format("\t%s=%s\n", entry.getKey(), entry.getValue()));
			}
		}
		
		LOG.info(report);
	}
	
	/**
	 * Transfers the parsed command line arguments into the job configuration
	 * and fills in defaults for the optional properties, logging every value
	 * it sets along with its origin (argument vs. default).
	 */
	private void setupConf() {
		final String PROP_FROM_ARGS = "\t%s=%s\t[-%s]\n";
		final String PROP_DEFAULT	= "\t%s=%s\t[default]\n";
		
		StringBuilder msg = new StringBuilder();
		msg.append("Setting properties:\n");
		
		Configuration conf = getConf();
		
		//set properties from args
		msg.append(String.format(PROP_FROM_ARGS, CONF_NUM_TREES, numTrees, MainArgs.ARG_NUM_TREES));
		conf.setInt(CONF_NUM_TREES, numTrees);
		
		msg.append(String.format(PROP_FROM_ARGS, CONF_START_TIME, startTime, MainArgs.ARG_START_TIME));
		conf.setFloat(CONF_START_TIME, (float) startTime);
		
		msg.append(String.format(PROP_FROM_ARGS, CONF_NUM_SEG, numSeg, MainArgs.ARG_NUM_SEG));
		conf.setInt(CONF_NUM_SEG, numSeg);
		
		// CONSISTENCY FIX: log the pairs property like every other arg-derived
		// property (it was previously set without being reported)
		msg.append(String.format(PROP_FROM_ARGS, CONF_PAIRS, pairs.getPairsStr(), MainArgs.ARG_PAIRS));
		conf.set(CONF_PAIRS, pairs.getPairsStr());
		
		//set default properties
		if (conf.get(CONF_HPD_LEVEL) == null) {
			msg.append(String.format(PROP_DEFAULT, CONF_HPD_LEVEL, DEFAULT_HPD_LEVEL));
			conf.setFloat(CONF_HPD_LEVEL, DEFAULT_HPD_LEVEL);
		}
		
		if (conf.get(CONF_PERMILS) == null) {
			final boolean DEFAULT_PERMILS = true;
			msg.append(String.format(PROP_DEFAULT, CONF_PERMILS, DEFAULT_PERMILS));
			conf.setBoolean(CONF_PERMILS, DEFAULT_PERMILS);
		}
		
		LOG.info(msg);
	}

	/**
	 * Validates the parsed command line arguments, failing fast before any
	 * job setup work is done.
	 *
	 * @throws IllegalArgumentException when any argument is invalid
	 */
	private void validateArgs() {
		if (numTrees <= 0)
			throw new IllegalArgumentException("numTrees <= 0");

		// CONSISTENCY FIX: the mapper (START_TIME <= 0) and validateConf()
		// (value <= 0) both reject a zero start time, so reject it here too
		// instead of only rejecting negative values (was: startTime < 0).
		if (startTime <= 0)
			throw new IllegalArgumentException("startTime <= 0");

		if (numSeg <= 0)
			throw new IllegalArgumentException("numSeg <= 0");

		if (!pairsFile.isFile())
			throw new IllegalArgumentException(String.format(
					"pairsFile (%s) does not exist",
					pairsFile.getAbsolutePath()));

		if (getToLocal) {
			if (localDir == null)
				throw new IllegalArgumentException("localDir == null");

			// refuse to overwrite an existing local output location
			if (localDir.exists())
				throw new IllegalArgumentException(String.format(
						"localDir dir/file (%s) already exist",
						localDir.getAbsolutePath()));
		}
	}
	
	/**
	 * Loads the migration pairs from the local pairs file.
	 *
	 * @throws IOException when the pairs file cannot be read
	 */
	private void init() throws IOException {
		Pairs loaded = new Pairs();
		loaded.addPairs(pairsFile);
		pairs = loaded;
	}
	
	/**
	 * Validates the final job configuration before submission.
	 *
	 * @throws MissingPropertyException when a required property is absent
	 * @throws IllegalArgumentException when a property holds an invalid value
	 */
	private void validateConf() throws MissingPropertyException {
		requirePositiveInt(CONF_NUM_TREES);
		requirePositiveFloat(CONF_START_TIME);
		requirePositiveInt(CONF_NUM_SEG);
		requireUnitInterval(CONF_HPD_LEVEL);
		
		//TODO: other props
	}
	
	/** Requires {@code prop} to be set and hold a strictly positive int. */
	private void requirePositiveInt(String prop) throws MissingPropertyException {
		checkIsSet(prop);
		if (getConf().getInt(prop, 0) <= 0)
			throw new IllegalArgumentException(String.format("%s=%s <= 0",
					prop, getConf().get(prop)));
	}
	
	/** Requires {@code prop} to be set and hold a strictly positive float. */
	private void requirePositiveFloat(String prop) throws MissingPropertyException {
		checkIsSet(prop);
		if (getConf().getFloat(prop, 0) <= 0)
			throw new IllegalArgumentException(String.format("%s=%s <= 0",
					prop, getConf().get(prop)));
	}
	
	/** Requires {@code prop} to be set and hold a float within [0,1]. */
	private void requireUnitInterval(String prop) throws MissingPropertyException {
		checkIsSet(prop);
		float value = getConf().getFloat(prop, -1);
		if (value < 0 || value > 1)
			throw new IllegalArgumentException(String.format("%s=%s not in range [0,1]",
					prop, getConf().get(prop)));
	}

	/**
	 * Ensures the given property is present in the configuration.
	 *
	 * @param prop configuration property name
	 * @throws MissingPropertyException when the property is not set
	 */
	private void checkIsSet(String prop) throws MissingPropertyException {
		if (getConf().get(prop) == null)
			throw new MissingPropertyException(prop);
	}

	/** Runs the {@link Migrations} tool via {@link ToolRunner} and exits with its status code. */
	public static void main(String[] args) throws Exception {
		final int exitCode = ToolRunner.run(new Migrations(), args);
		System.exit(exitCode);
	}

}

