package com.shujia.util

import java.nio.charset.StandardCharsets

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Put, Table}
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.spark.sql.DataFrame

object HBaseUtil {
  /**
   * Creates a new connection to the HBase cluster.
   *
   * NOTE: an HBase [[Connection]] is heavyweight; the caller is responsible
   * for closing it when done.
   *
   * @return a freshly created HBase connection
   */
  def getConnection: Connection = {
    val configuration: Configuration = HBaseConfiguration.create()
    // ZooKeeper quorum used by HBase for cluster coordination.
    configuration.set("hbase.zookeeper.quorum", "master:2181,node1:2181,node2:2181")
    ConnectionFactory.createConnection(configuration)
  }

  /**
   * Persists tag data into the HBase table "userprofile".
   *
   * The table must be created beforehand in the HBase shell:
   * {{{ create 'userprofile','info' }}}
   *
   * Each row's "id" column is used as the rowkey; the value of the column
   * named `fieldName` is written to column family "info" under the qualifier
   * `fieldName`. Rows whose id or tag value is null are skipped.
   *
   * @param userTag   DataFrame containing at least the columns "id" and `fieldName`
   * @param fieldName name of the tag column to persist
   */
  def userTagToHBase(userTag: DataFrame, fieldName: String): Unit = {
    userTag.foreachPartition(rows => {
      // One connection per partition; this closure runs on the executors,
      // so the connection cannot be shared from the driver.
      val connection: Connection = HBaseUtil.getConnection
      try {
        val table: Table = connection.getTable(TableName.valueOf("userprofile"))
        try {
          rows.foreach(row => {
            val id = row.getAs[Any]("id")
            val field = row.getAs[Any](fieldName)
            // Guard against null key/value: getAs returns null for missing
            // data and .toString would otherwise throw a NullPointerException.
            if (id != null && field != null) {
              // The user id serves as the rowkey. Use explicit UTF-8 so the
              // encoding matches HBase's Bytes.toBytes regardless of the
              // JVM's platform-default charset.
              val put = new Put(id.toString.getBytes(StandardCharsets.UTF_8))
              put.addColumn(
                "info".getBytes(StandardCharsets.UTF_8),
                fieldName.getBytes(StandardCharsets.UTF_8),
                field.toString.getBytes(StandardCharsets.UTF_8))
              table.put(put)
            }
          })
        } finally {
          // Always release the table handle, even if a put fails.
          table.close()
        }
      } finally {
        // Always release the connection to avoid leaking ZooKeeper sessions.
        connection.close()
      }
    })
  }

}
