package net.sanbeicha.eshadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.elasticsearch.hadoop.mr.EsInputFormat;
import org.elasticsearch.hadoop.mr.LinkedMapWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class E2HJob01 {
    private static final Logger LOG = LoggerFactory.getLogger(E2HJob01.class);

    /**
     * Driver for an ElasticSearch-to-HDFS MapReduce job: reads documents from
     * the configured ES index/type through {@code EsInputFormat} and writes the
     * output of {@code E2HMapper01} to the HDFS path given on the command line.
     *
     * <p>Exits with 0 on job success, 1 on job failure or error, 2 on bad usage.
     *
     * @param args args[0] is the HDFS output directory
     */
    public static void main(String[] args) {
        // Validate arguments before doing any configuration work.
        if (args.length != 1) {
            LOG.error("usage: E2HJob01 <output path> (got {} argument(s))", args.length);
            System.exit(2);
        }
        try {
            Configuration conf = new Configuration();
            // Speculative task attempts could produce duplicate output; disable them.
            // NOTE(review): these are the legacy MRv1 property names; Hadoop 2+
            // translates them to mapreduce.{map,reduce}.speculative automatically.
            conf.setBoolean("mapred.map.tasks.speculative.execution", false);
            conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
            // ElasticSearch node(s) to connect to
            conf.set("es.nodes", "10.211.55.8:9200");
            // ElasticSearch index/type to read from
            conf.set("es.resource", "test_data/test_type/");

            Job job = Job.getInstance(conf, "JOBE2H01");
            job.setJarByClass(E2HJob01.class);
            job.setInputFormatClass(EsInputFormat.class);
            job.setMapperClass(E2HMapper01.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(LinkedMapWritable.class);

            FileOutputFormat.setOutputPath(job, new Path(args[0]));

            // Propagate the job result to the process exit code so callers
            // (shell scripts, schedulers like Oozie/Airflow) can detect failure.
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            System.exit(1);
        }
    }
}
