package com.ke.hbasetohdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class HBaseToHdfs {

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration(true);

        // HBase clients locate the cluster through ZooKeeper, so point the job at the ZooKeeper quorum
        configuration.set("hbase.zookeeper.quorum", "ke02,ke03,ke04");
        // Required when submitting from a Windows client to a Linux (heterogeneous) cluster
        configuration.set("mapreduce.app-submission.cross-platform", "true");
        // Uncomment to run with the local job runner instead of submitting to the cluster
        //configuration.set("mapreduce.framework.name", "local");

        // Create the job; Job.getInstance replaces the deprecated new Job(Configuration) constructor
        Job job = Job.getInstance(configuration, "HBaseToHdfs");
        job.setJarByClass(HBaseToHdfs.class);
        // When submitting from the IDE, ship the locally built jar to the cluster
        job.setJar("D:\\code\\mayun_hadoop\\test\\hbase\\target\\hive-1.0-SNAPSHOT.jar");

        // Full scan of the "test" table: a larger caching value cuts RPC round trips, and block
        // caching is disabled so the scan does not evict hot data from the region server cache
        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        // Wire MyMapper to the HBase "test" table. Text/IntWritable are assumed to be the mapper's
        // output key/value types (passing null here would leave the map output types undeclared)
        TableMapReduceUtil.initTableMapperJob("test", scan, MyMapper.class, Text.class, IntWritable.class, job);

        // A single reducer collects all mapper output into one file under the output directory
        job.setReducerClass(MyReducer.class);
        job.setNumReduceTasks(1);
        // Final output types written to HDFS (assumed to match MyReducer's output)
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // The output directory must not already exist, or the job fails at submission
        FileOutputFormat.setOutputPath(job, new Path("/data/output18"));


        // Submit the job and wait for completion; exit non-zero on failure
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
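
/*
 * For reference only: a minimal sketch of what the MyMapper and MyReducer classes referenced
 * above are assumed to look like. The real implementations live in their own files in the
 * com.ke.hbasetohdfs package (not shown here); the types below merely match the Text/IntWritable
 * output configured in the driver, with a row-key count used as a placeholder body.
 *
 * public class MyMapper extends TableMapper<Text, IntWritable> {
 *     @Override
 *     protected void map(ImmutableBytesWritable rowKey, Result row, Context context)
 *             throws IOException, InterruptedException {
 *         // e.g. emit the HBase row key with a count of 1
 *         context.write(new Text(Bytes.toString(rowKey.get())), new IntWritable(1));
 *     }
 * }
 *
 * public class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
 *     @Override
 *     protected void reduce(Text key, Iterable<IntWritable> values, Context context)
 *             throws IOException, InterruptedException {
 *         int sum = 0;
 *         for (IntWritable value : values) {
 *             sum += value.get();
 *         }
 *         context.write(key, new IntWritable(sum));
 *     }
 * }
 */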
