package com.zhanghe.study.demo.es;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.elasticsearch.hadoop.mr.EsOutputFormat;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;


/**
 * @author zh
 * @date 2022/3/31 17:23
 */
public class Hadoop2EsJob {

    /**
     * Driver for a map-only MapReduce job that bulk-loads JSON documents
     * stored on HDFS into an existing Elasticsearch index via
     * {@link EsOutputFormat}.
     *
     * <p>Exits with status 0 on job success, 1 on job failure or any
     * setup/submission exception, so callers (shell scripts, schedulers)
     * can detect failures.
     *
     * @param args unused; all input/output locations are hard-coded below
     */
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            // Disable speculative execution: duplicate task attempts could
            // write the same documents to Elasticsearch more than once.
            conf.setBoolean("mapred.map.tasks.speculative.execution", false);
            conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);

            // Elasticsearch node(s) to connect to.
            conf.set("es.nodes", "192.168.1.220:9200");
            // Target index/type; must already exist, since auto-create is off.
            conf.set("es.resource", "mydata01/adCampaign/");
            conf.set("es.index.auto.create", "false");
            // The HDFS records are already JSON, so pass them through as-is.
            conf.set("es.input.json", "yes");
            // Use each JSON object's "id" field as the document _id, making
            // re-runs idempotent (the same document is overwritten, not duplicated).
            conf.set("es.mapping.id", "id");

            Job job = Job.getInstance(conf, "jobh2e01");
            job.setJarByClass(Hadoop2EsJob.class);
            job.setMapperClass(Hadoop2EsMapper.class);
            // Map-only job: the mapper emits the raw JSON line as the value.
            job.setMapOutputKeyClass(NullWritable.class);
            job.setMapOutputValueClass(Text.class);
            job.setOutputFormatClass(EsOutputFormat.class);

            FileInputFormat.addInputPath(job, new Path(
                    "hdfs://localhost:9000/user/hive/warehouse/study_hive.db/adCampaign/"));

            // Propagate the job outcome as the process exit code.
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (Exception e) {
            e.printStackTrace();
            // Fail loudly: without this the JVM would exit 0 even on error.
            System.exit(1);
        }
    }
}
