package com.run.fjy.mr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.elasticsearch.hadoop.mr.EsOutputFormat;

import com.run.bcpimp.mr.BcpExtractMR;
import com.run.bcpimp.mr.io.ExtractTableValueWritable;
import com.run.bcpimp.util.RunTool;

// Referenced classes of package com.run.fjy.mr:
//			PerDayCombiner, PerDayReducer

/**
 * MapReduce driver that aggregates per-unit (default "day") statistics from
 * sequence-file input and writes the results to an Elasticsearch index named
 * {@code statis_<unit>} with a per-document {@code {DATE}} routing pattern.
 */
public class PerDay extends RunTool
{

	public PerDay()
	{
	}

	/**
	 * Configures and submits the statistics job, blocking until it completes.
	 *
	 * @param arg0 command-line arguments; forwarded to {@code setInOutPaths}
	 *             to configure the job's input/output paths
	 * @return 0 when the job succeeds, 1 when it fails
	 * @throws Exception if job configuration or execution fails
	 */
	public int exec(String arg0[])
		throws Exception
	{
		Configuration conf = getConf();
		// Speculative execution is disabled because duplicate task attempts
		// would produce duplicate writes to Elasticsearch (not an idempotent sink).
		conf.setBoolean("mapred.map.tasks.speculative.execution", false);
		conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
		conf.setInt("es.batch.size.entries", 8000);
		// NOTE(review): hard-coded ES node address; consider supplying
		// "es.nodes" via configuration instead of baking it into the jar.
		conf.set("es.nodes", "192.168.17.30:9200");
		// The statistics unit (e.g. "day") selects both the target ES index
		// and the matching bcp-extract configuration file.
		String unit = conf.get("statis.unit", "day");
		conf.set("es.resource.write", "statis_" + unit + "/{DATE}");
		conf.set("cfgfile.bcpextract", "conf/bcpextract_" + unit + ".xml");
		Job job = new Job(conf);
		job.setJarByClass(PerDay.class);
		job.setMapperClass(BcpExtractMR.MyMapperBase.class);
		job.setCombinerClass(PerDayCombiner.class);
		job.setReducerClass(PerDayReducer.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(ExtractTableValueWritable.class);
		job.setOutputValueClass(MapWritable.class);
		job.setInputFormatClass(SequenceFileInputFormat.class);
		setInOutPaths(job, arg0);
		job.setOutputFormatClass(EsOutputFormat.class);
		// Return the status rather than calling System.exit: the original
		// System.exit made the trailing "return 0" unreachable and prevented
		// the RunTool driver from running any cleanup after job completion.
		return job.waitForCompletion(true) ? 0 : 1;
	}

	/**
	 * @return the number of command-line arguments this tool requires (the
	 *         input/output path argument consumed by {@code setInOutPaths})
	 */
	protected int getArgumentNumber()
	{
		return 1;
	}
}