package com.hbase.export.hfile.java;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.SparkSession;
import scala.Function1;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class ExportHbaseTableToHfile {

    /**
     * hbase配置信息
     * @param zkQuorum
     * @param zkPort
     * @param tableName
     * @return
     */
    private static Configuration getHBaseConfiguration(String zkQuorum, String zkPort, String tableName){
        Configuration hbaseConf = HBaseConfiguration.create();
        hbaseConf.set("hbase.zookeeper.quorum", zkQuorum);
        hbaseConf.set("hbase.zookeeper.property.clientPort", zkPort);
        hbaseConf.set(org.apache.hadoop.hbase.mapreduce.TableInputFormat.INPUT_TABLE, tableName);
        return hbaseConf;
    }


    public static void main(String[] args) {
        // Define SparkContext
        SparkConf sparkConf = new SparkConf().setAppName("Spark-Hbase").setMaster("local[0]");
        JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf);
        // Conf with Hbase
        Configuration hbaseConf = getHBaseConfiguration("", "2181", "");
        // Read data using spark
        JavaPairRDD<ImmutableBytesWritable, Result> hBaseRDD = javaSparkContext.newAPIHadoopRDD(hbaseConf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);


        JavaRDD<Tuple2<ImmutableBytesWritable, KeyValue>> a = hBaseRDD.map(new Function<Tuple2<ImmutableBytesWritable, Result>, Tuple2<ImmutableBytesWritable, KeyValue>>() {
            @Override
            public Tuple2<ImmutableBytesWritable, KeyValue> call(Tuple2<ImmutableBytesWritable, Result> v1) throws Exception {
                return null;
            }
        });


        JavaRDD<Tuple2<ImmutableBytesWritable, KeyValue>> b = hBaseRDD.flatMap(new FlatMapFunction<Tuple2<ImmutableBytesWritable, Result>, Tuple2<ImmutableBytesWritable, KeyValue>>() {
            @Override
            public Iterator<Tuple2<ImmutableBytesWritable, KeyValue>> call(Tuple2<ImmutableBytesWritable, Result> immutableBytesWritableResultTuple2) throws Exception {
                return null;
            }
        });




//        SparkSession sparkSession = SparkSession.builder().appName("SparkHBaseRDD").getOrCreate();
//        Configuration hbaseConf = getHBaseConfiguration("", "2181", "");
//
//        RDD<Tuple2<ImmutableBytesWritable, Result>> hBaseRDD1 = sparkSession.sparkContext().newAPIHadoopRDD(hbaseConf,
//                TableInputFormat.class,
//                ImmutableBytesWritable.class,
//                Result.class
//        );


    }

}
