package useTask.hdfs_to_hbase.hdfsMode;

import hadoop.unit.GlobalConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class driver {
    private static Configuration conf = GlobalConfiguration.getCon();

    /**
     * Bulk-load driver: reads a CSV file from HDFS and writes HBase-format
     * HFiles for the table {@code bigData2231} via {@link HFileOutputFormat2}.
     *
     * <p>Exit code 0 on success, 2 on job failure.
     *
     * @param args unused command-line arguments
     * @throws Exception if the job setup, HBase connection, or HDFS access fails
     */
    public static void main(String[] args) throws Exception {

        Job job = Job.getInstance(conf, "Hbase_Job");

        job.setJarByClass(driver.class);
        job.setMapperClass(mapper.class);

        // Map output types must match what HFileOutputFormat2 expects:
        // row key as ImmutableBytesWritable, mutation as Put.
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Put.class);

        Path in_path = new Path("hdfs://master:1920/hadoop/Test/RestaurantDemo/demo3/chengji.csv");
        FileInputFormat.setInputPaths(job, in_path);

        // configureIncrementalLoad needs the target table's schema and region
        // boundaries; close the HBase resources as soon as the job is configured.
        // NOTE: do NOT set the reduce-task count to 0 here — HFiles must be
        // written in sorted key order, so configureIncrementalLoad installs a
        // sort reducer, a TotalOrderPartitioner, and one reduce task per region
        // (it would override a manual setNumReduceTasks(0) anyway).
        Configuration hconf = HBaseConfiguration.create();
        try (Connection hconn = ConnectionFactory.createConnection(hconf);
             Table table = hconn.getTable(TableName.valueOf("bigData2231"));
             RegionLocator regionLocator = hconn.getRegionLocator(table.getName())) {
            HFileOutputFormat2.configureIncrementalLoad(job, table, regionLocator);
        }

        // Redundant with configureIncrementalLoad (which already sets it),
        // kept as explicit documentation of the output format.
        job.setOutputFormatClass(HFileOutputFormat2.class);

        Path out_path = new Path("hdfs://master:1920/hadoop/Test/RestaurantDemo/demo07");
        FileOutputFormat.setOutputPath(job, out_path);

        // MapReduce refuses to run if the output directory already exists,
        // so remove any leftover output from a previous run.
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(out_path)) {
            fs.delete(out_path, true);
        }

        // 0 = normal exit, 2 = job failed.
        System.exit(job.waitForCompletion(true) ? 0 : 2);
    }

}
