package com.streaming.project.spark.dao

import com.imooc.spark.project.utils.HBaseUtils
import com.streaming.project.spark.domain.CourseClickCount
import org.apache.hadoop.hbase.client.Get
import org.apache.hadoop.hbase.util.Bytes

import scala.collection.mutable.ListBuffer

/**
  * Created by maomao.
  * Data access layer (DAO) for course click counts stored in HBase.
  */

object CourseClickCountDAO {

  // HBase table that stores per-course click totals.
  // NOTE(review): the name says "clickout" — looks like a typo for
  // "clickcount"; kept as-is because it must match the actual table name.
  val tableName = "imooc_course_clickout"

  // Column family that holds the counter cell.
  val cf = "info"

  // Column qualifier of the click counter.
  // ("qualifer" is a typo for "qualifier"; name kept for source compatibility.)
  val qualifer = "click_count"

  /**
    * Persists a batch of course click counts to HBase.
    *
    * Each entry is applied with `incrementColumnValue`, so repeated calls
    * for the same row key accumulate rather than overwrite.
    * (Method name "sava" is a typo for "save"; kept so existing callers
    * keep compiling.)
    *
    * @param list batch of (day_course row key, click count) pairs to add
    */
  def sava(list: ListBuffer[CourseClickCount]): Unit = {
    val table = HBaseUtils.getINSTANCE().getTable(tableName)
    // NOTE(review): the table handle is never closed. If HBaseUtils.getTable
    // creates a fresh HTable per call this leaks a connection per batch —
    // confirm, and close in a finally block if so.
    for (item <- list) {
      table.incrementColumnValue(
        Bytes.toBytes(item.day_course),
        Bytes.toBytes(cf),
        Bytes.toBytes(qualifer),
        item.clickcount
      )
    }
  }

  /**
    * Reads the accumulated click count for a given row key.
    *
    * @param day_course row key (presumably "yyyyMMdd_courseId" — see writers)
    * @return the stored count, or 0L when the row/cell does not exist yet
    */
  def count(day_course: String): Long = {
    val table = HBaseUtils.getINSTANCE().getTable(tableName)

    val get = new Get(Bytes.toBytes(day_course))

    // Use Bytes.toBytes (UTF-8) consistently instead of String.getBytes,
    // which depends on the platform default charset.
    val value = table.get(get).getValue(Bytes.toBytes(cf), Bytes.toBytes(qualifer))

    // The cell is absent until the first increment, so map null -> 0L.
    Option(value).map(Bytes.toLong).getOrElse(0L)
  }

  /** Ad-hoc smoke test: writes a few counters, then reads them back. */
  def main(args: Array[String]): Unit = {

    val list = new ListBuffer[CourseClickCount]

    list.append(CourseClickCount("20190303_8", 8))
    list.append(CourseClickCount("20190303_9", 9))
    list.append(CourseClickCount("20190303_90", 90))
    list.append(CourseClickCount("20190303_900", 900))
    list.append(CourseClickCount("20190303_10", 10))

    sava(list)

    println(count("20190303_8"))
    println(count("20190303_9"))
    println(count("20190303_90"))
    println(count("20190303_900"))
    println(count("20190303_10"))
  }

}
