package com.sys.tdhclient.startapp

import java.util.Properties

import com.sys.tdhclient.startapp.SparkCrudHbase.properties
import com.sys.tdhclient.utils.{HBaseUtils, SparkSc}
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession


/**
 * Spark driver that scans an HBase/Hyperbase table and prints each row's
 * (rowKey, info:click_count) pair on the driver's stdout.
 *
 * Connection settings (`tableName`, `quorum`, `port`) come from the shared
 * properties file via [[SparkSc]]; the historical hard-coded table name is
 * kept only as a fallback when the property is absent, so existing
 * deployments keep working.
 */
object SparkRdHyperbase {
  private val sparkContext: SparkContext = SparkSc.getSparkContext()
  // NOTE(review): the session is never used below; kept because creating it
  // may have required side effects in SparkSc — confirm before removing.
  private val sparkSession: SparkSession = SparkSc.getSparkSession()
  private val properties: Properties = SparkSc.getProperties()
  private val tableName: String = properties.getProperty("tableName")
  private val quorum: String = properties.getProperty("quorum")
  private val port: String = properties.getProperty("port")

  def main(args: Array[String]): Unit = {
    // FIX: the original shadowed the configured values with hard-coded locals,
    // silently ignoring the properties file (its quorum/port locals were dead
    // code). Prefer the configured table name, falling back to the old literal.
    val table = Option(tableName).getOrElse("imooc_course_clickcount")

    // Configure the HBase connection and the table to scan.
    val conf = HBaseUtils.getHBaseConfiguration()
    conf.set(TableInputFormat.INPUT_TABLE, table)
    // FIX: quorum/port were loaded from properties but never applied to the
    // configuration. Apply them when present; when absent, behavior is
    // unchanged (HBaseUtils' defaults stand).
    Option(quorum).foreach(conf.set("hbase.zookeeper.quorum", _))
    Option(port).foreach(conf.set("hbase.zookeeper.property.clientPort", _))

    // Load the HBase table as an RDD of (ImmutableBytesWritable, Result).
    // FIX: dropped .cache() — the RDD is consumed exactly once and was never
    // unpersisted, so caching only wasted executor memory.
    val hBaseRDD = sparkContext.newAPIHadoopRDD(
      conf,
      classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])

    // Extract (rowKey, info:click_count) from each scan Result.
    // Bytes.toBytes is charset-safe, unlike String.getBytes which depends on
    // the platform default encoding.
    val data = hBaseRDD.map { case (_, result) =>
      val key = Bytes.toString(result.getRow)
      val value = Bytes.toString(
        result.getValue(Bytes.toBytes("info"), Bytes.toBytes("click_count")))
      (key, value)
    }

    // FIX: rdd.foreach(println) runs on the executors, so nothing appears in
    // the driver's console. Collect first — acceptable here because this is a
    // small diagnostic dump; revisit if the table grows large.
    data.collect().foreach(println)

    sparkContext.stop()
  }
}
