package com.zhao

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Result, Scan}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.Bytes
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

import java.util.Base64

/**
 * Description: Reads data from HBase using newAPIHadoopRDD; rows can be filtered via a Scan. <br/>
 * Copyright (c) 2021, Zhao <br/>
 * A wet person does not fear the rain. <br/>
 * Date: 2021/2/4 15:00
 *
 * @author 柒柒
 * @version : 1.0
 */

object readFromHBaseWithHbaseNewAPIScan {

  /**
   * Entry point: reads rows from the HBase table "test" (column family "cf1")
   * through Spark's newAPIHadoopRDD, extracts the "name" and "age" columns,
   * and prints each row to stdout.
   */
  def main(args: Array[String]): Unit = {

    // Suppress noisy Spark logging on the console.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val sparkSession: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName)
      .master("local[*]")
      .getOrCreate()

    val sc: SparkContext = sparkSession.sparkContext

    val tableName = "test"
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.set("hbase.zookeeper.quorum", "node01,node02,node03") // ZooKeeper quorum
    hbaseConf.set("hbase.zookeeper.property.clientPort", "2181")    // ZooKeeper client port
    // BUG FIX: this is a READ job, so the table must be configured as the
    // *input* table for TableInputFormat. The original code set
    // TableOutputFormat.OUTPUT_TABLE ("hbase.mapred.outputtable"), which
    // TableInputFormat never reads, leaving the scan without a target table.
    hbaseConf.set(TableInputFormat.INPUT_TABLE, tableName)

    // Restrict the scan to column family "cf1", then serialize it into the
    // Base64-encoded protobuf string format that TableInputFormat.SCAN expects.
    val scan = new Scan()
    scan.addFamily(Bytes.toBytes("cf1"))
    val proto = ProtobufUtil.toScan(scan)
    val scanToString: String = Base64.getEncoder.encodeToString(proto.toByteArray)
    hbaseConf.set(TableInputFormat.SCAN, scanToString)

    // Read the table as an RDD of (row key, Result) pairs.
    val hbaseRDD = sc.newAPIHadoopRDD(hbaseConf, classOf[TableInputFormat],
      classOf[ImmutableBytesWritable], classOf[Result])

    hbaseRDD
      .map(_._2) // keep only the Result; the key bytes are also available via getRow
      .map { result =>
        (result.getRow,
          result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("name")),
          result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("age")))
      }
      // NOTE(review): assumes all three cells exist and are UTF-8 text;
      // a missing column would make getValue return null and NPE here.
      .map(row => (new String(row._1), new String(row._2), new String(row._3)))
      .collect()
      .foreach(r => println("rowKey:" + r._1 + ",name:" + r._2 + ",age:" + r._3))

    // Release the local Spark context (was leaked in the original).
    sparkSession.stop()
  }
}












