package com.zachary.es;

import com.sdyc.ndetl.util.HbaseTableName;
import com.sdyc.ndetl.util.UserStoreHelper;
import com.zachary.es.hadoop.map.IndexMapper;
import com.zachary.es.hadoop.map.SearchMapper;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.elasticsearch.hadoop.mr.EsOutputFormat;

import java.io.IOException;

/**
 * <pre>
 * Created with IntelliJ IDEA.
 * User: zachary.
 * Date: 2014/7/15
 * Time: 17:29
 * Machine: company Windows PC, IntelliJ IDEA <br>
 * </pre>
 *
 * @author zachary.
 */
public class IndexApp {

    /**
     * Entry point: runs a map-only MapReduce job that scans a row-key range of
     * the HBase post table and bulk-indexes each row into Elasticsearch via
     * {@link EsOutputFormat}.
     *
     * @param args unused command-line arguments
     * @throws IOException            if HBase access or job submission fails
     * @throws ClassNotFoundException if a job class cannot be resolved
     * @throws InterruptedException   if interrupted while waiting for the job
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("es.nodes", "192.168.1.109");
        conf.set("es.port", "9200");
        // Mapper emits pre-serialized JSON documents, not writable field maps.
        conf.set("es.input.json", "yes");
        conf.set("es.resource", "post/basic");
        // Use the document's postUrn field as the Elasticsearch _id.
        conf.set("es.mapping.id", "postUrn");

        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes("195861-3401276643918227"));
        // NOTE(review): "…" + 1 is String/int concatenation, producing "…98651".
        // Presumably intended so the exclusive stop row still includes the key
        // "195861-3431543722509865" itself — confirm; appending "\0" would be
        // the tightest such bound.
        scan.setStopRow(Bytes.toBytes("195861-3431543722509865" + 1));
        scan.addFamily(UserStoreHelper.FAMILY_INFO);
        // MR full scans should not pollute the region servers' block cache.
        scan.setCacheBlocks(false);

        Job job = Job.getInstance(conf);
        job.setJobName("esIndex");
        // Fix: ship the jar of the mapper this job actually runs
        // (was SearchMapper.class, but the job uses IndexMapper).
        job.setJarByClass(IndexMapper.class);

        TableMapReduceUtil.initTableMapperJob(HbaseTableName.POST_TABLE.getTableName(),
                scan, IndexMapper.class, NullWritable.class,
                Text.class,
                job);

        job.setOutputFormatClass(EsOutputFormat.class);
        // Map-only job: documents flow straight from the mapper to Elasticsearch.
        job.setNumReduceTasks(0);
        System.exit(job.waitForCompletion(true) ? 0 : -1);
    }
}
