import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.client.ConnectionFactory
import org.apache.hadoop.hbase.{HBaseConfiguration, KeyValue, TableName}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.SparkSession

/**
 * Spark bulk-load example: reads a CSV file, builds sorted HFiles via
 * HFileOutputFormat2, and bulk-loads them into an HBase table.
 *
 * @author hunter@doitedu.cn
 * @since 2020/8/19 13:06
 */
object BulkLoad {

  /**
   * Entry point. Reads a CSV of records, converts them to sorted
   * (rowkey, KeyValue) pairs, writes HFiles to a staging directory, and
   * bulk-loads the HFiles into the HBase table "test" (family "f",
   * qualifier "q").
   *
   * @param args optional overrides: args(0) = input CSV path,
   *             args(1) = HFile staging directory. Defaults preserve the
   *             original hard-coded paths.
   */
  def main(args: Array[String]): Unit = {

    // Backward-compatible generalization: paths come from the CLI when given.
    val inputPath = if (args.length > 0) args(0) else "portrait/testdata/bulk/bulk.csv"
    // NOTE: the original wrote to "/hfile/" but loaded from "/hfile";
    // a single value is used for both to avoid the inconsistency.
    val hfileDir = if (args.length > 1) args(1) else "/hfile"

    val spark = SparkSession.builder()
      .appName("BulkLoad")
      .master("local[*]")
      // Kryo is needed to serialize the Hadoop writables efficiently.
      .config("spark.serializer", classOf[KryoSerializer].getName)
      .getOrCreate()

    val rdd = spark.sparkContext.textFile(inputPath)

    // HFileOutputFormat2 requires rows in ascending rowkey order, so sort
    // before building the KeyValues. Columns: arr(0) = rowkey, arr(2) = value.
    val hfileRdd = rdd
      .map { line =>
        val arr = line.split(",")
        (arr(0), arr(2))
      }
      .sortBy(_._1)
      .map { case (rowKey, value) =>
        // Compute the rowkey bytes once and reuse for both key and KeyValue.
        val keyBytes = Bytes.toBytes(rowKey)
        val kv = new KeyValue(keyBytes, Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes(value))
        (new ImmutableBytesWritable(keyBytes), kv)
      }

    val conf = HBaseConfiguration.create()
    conf.set("fs.defaultFS", "hdfs://doitedu01:8020")
    conf.set("hbase.zookeeper.quorum", "doitedu01,doitedu02,doitedu03")

    val conn = ConnectionFactory.createConnection(conf)
    val tableName = TableName.valueOf("test")
    val table = conn.getTable(tableName)
    try {
      val locator = conn.getRegionLocator(tableName)

      // Configure reducer/partitioner settings so the HFiles line up with
      // the table's current region boundaries.
      val job = Job.getInstance(conf)
      HFileOutputFormat2.configureIncrementalLoad(job, table, locator)

      hfileRdd.saveAsNewAPIHadoopFile(
        hfileDir,
        classOf[ImmutableBytesWritable],
        classOf[KeyValue],
        classOf[HFileOutputFormat2],
        job.getConfiguration)

      // Move the staged HFiles into the live table.
      new LoadIncrementalHFiles(conf).doBulkLoad(new Path(hfileDir), conn.getAdmin, table, locator)
    } finally {
      // Original leaked the Table handle and skipped cleanup on failure;
      // close everything even when the job throws.
      table.close()
      conn.close()
      spark.close()
    }
  }

}
