package com.sys.tdhclient.startapp

import java.util.Properties

import com.sys.tdhclient.utils.{HBaseUtils, SparkSc}
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

/**
 * Spark job that writes sample "rowKey,value" records into an HBase table
 * using per-partition `Put` operations.
 *
 * The table name is read from the properties supplied by [[SparkSc]]
 * (key "tableName").
 */
object SparkWtHbasePut {

  /**
   * Job entry point.
   *
   * The logic previously lived in the object initializer, which meant it ran
   * (or failed) at class-load time and the object could not be used as a
   * spark-submit main class. Wrapping it in `main` fixes both issues.
   */
  def main(args: Array[String]): Unit = {
    val sparkContext: SparkContext = SparkSc.getSparkContext()
    val properties: Properties = SparkSc.getProperties()
    val tableName: String = properties.getProperty("tableName")

    // Sample input: each element is a "rowKey,value" CSV pair.
    val indataRDD = sparkContext.makeRDD(Array("002,10", "003,10", "004,50"))

    indataRDD.foreachPartition { partition =>
      // Configuration and the table handle are created on the executor for
      // each partition: neither is serializable, so they cannot be built on
      // the driver and shipped inside the closure.
      val conf = HBaseUtils.getHBaseConfiguration()
      conf.set(TableOutputFormat.OUTPUT_TABLE, tableName)
      val htable = HBaseUtils.getTable(conf, tableName)

      try {
        partition.foreach { record =>
          val fields = record.split(",")
          val rowKey = fields(0)
          val cellValue = fields(1)

          val put = new Put(Bytes.toBytes(rowKey))
          // NOTE(review): "clict_count" looks like a typo for "click_count",
          // but it is the stored column qualifier — renaming it would change
          // the on-disk schema, so it is left as-is.
          put.add(Bytes.toBytes("info"), Bytes.toBytes("clict_count"), Bytes.toBytes(cellValue))
          htable.put(put)
        }
      } finally {
        // Fix: the table handle was previously leaked; always release it,
        // even if a record in this partition throws.
        htable.close()
      }
    }

    sparkContext.stop()
  }
}
