package com.packtpub.esh;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.elasticsearch.hadoop.mr.EsOutputFormat;

/**
 * Driver wires up the word-count MapReduce job that indexes its results into
 * Elasticsearch. It supplies the Elasticsearch node address and the target
 * index/type, selects the Mapper and Reducer classes, and configures the
 * input path and the ES-Hadoop output format.
 */
public class Driver {

    /**
     * Entry point for the word-count job.
     *
     * @param args args[0] = HDFS input path containing the text to count
     * @throws Exception if job configuration or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException on args[0] below.
        if (args.length < 1) {
            System.err.println("Usage: Driver <input-path>");
            System.exit(2);
        }

        Configuration conf = new Configuration();
        // ElasticSearch Server nodes to point to
        conf.set("es.nodes", "192.168.211.108:9200");
        // ElasticSearch index and type name in {indexName}/{typeName} format
        conf.set("es.resource", "eshadoop/wordcount");
        // ES-Hadoop recommends disabling speculative execution when writing
        // to Elasticsearch: duplicate speculative task attempts can index
        // duplicate documents.
        conf.setBoolean("mapreduce.map.speculative", false);
        conf.setBoolean("mapreduce.reduce.speculative", false);

        // Job.getInstance(...) replaces the deprecated
        // Job(Configuration, String) constructor.
        Job job = Job.getInstance(conf, "word count");
        // set Driver class
        job.setJarByClass(Driver.class);
        job.setMapperClass(WordsMapper.class);
        job.setReducerClass(WordsReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // The ES-Hadoop library provides dedicated EsInputFormat and
        // EsOutputFormat classes that act as adapters to and from the JSON
        // representation Elasticsearch expects. By default, MapWritable
        // objects written to the context are converted to JSON documents by
        // EsOutputFormat before being indexed.
        job.setOutputFormatClass(EsOutputFormat.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}
