package com.imooc.spark.streaming.dao

import com.imooc.spark.streaming.kafka.utils.{HBaseUtils}

import scala.collection.mutable.ListBuffer

/**
  * @description HBase DAO layer for per-user/shop click counts
  * @author yuyon26@126.com
  * @date 2018/11/5 9:39
  */
object ShopClickCountDao {

  // HBase table that stores per-day, per-shop click counters.
  val tableName = "userAgent_shop_click_count"
  // Column family holding the counter cell.
  val cf = "info"
  // Column qualifier (spelling kept as-is: it is part of the stored schema).
  val qualifer = "day_shopId_count"

  /**
    * Batch-save a list of click counts into HBase.
    *
    * @param list collection of ShopClickCount entries
    */
/*  def insert(list: ListBuffer[ShopClickCount]) = {
    val hbaseUtil = HBaseUtils.getInstance();
    for (ele <- list) {
      val rowKey = ele.day_shopId
      hbaseUtil.putDataH(tableName, rowKey, cf, qualifer, ele.click_count + count(rowKey))
    }
  }*/

  /**
    * Look up the stored click count for the given row key.
    *
    * @param rowKey row key of the form "day_shopId"
    * @return the stored count, or 0 when the cell is absent
    */
  def count(rowKey: String): Long = {
    val stored = HBaseUtils.getInstance().getValueBySeriesH(tableName, rowKey, cf, qualifer)
    // Option(...) maps a null cell value to None; a missing cell counts as zero.
    Option(stored).map(_.toLong).getOrElse(0L)
  }

  /** Ad-hoc smoke check: prints the count for a sample row key. */
  def main(args: Array[String]): Unit =
    println(count("20171111_88"))

}
