package hb2redis

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.slf4j.LoggerFactory
import scala.collection.mutable.ArrayBuffer

object ab {

  // Scoring weights: score = a * sum(first 7 cell versions) + b * sum(all versions).
  val a = 1.0
  val b = 2.0
  // Logger for this object.
  val log = LoggerFactory.getLogger(this.getClass)

  /**
   * Entry point. Scans every row of the HBase table "mprofile" (reading up to
   * 30 versions per cell), parses each cell value as a Float, computes a
   * weighted score per row key, and prints each (rowkey, score) pair.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    import scala.util.Try

    val config = new SparkConf().setAppName("mobile.scoring").setMaster("local[2]")
    val sc = new SparkContext(config)

    try {
      val conf = HBaseConfiguration.create()
      conf.set(TableInputFormat.INPUT_TABLE, "mprofile")
      // Read up to 30 versions of each cell so older profile values
      // participate in the score.
      conf.set(TableInputFormat.SCAN_MAXVERSIONS, "30")

      val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
        classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
        classOf[org.apache.hadoop.hbase.client.Result])

      println("hbase count: " + hBaseRDD.count())
      println("==========================================")

      // Stage 1: extract (rowkey, numeric cell values) from each Result.
      hBaseRDD.map {
        case (_, result) =>
          val rowkey = new String(result.getRow)
          // rawCells() replaces the deprecated Result.raw(); CellUtil.cloneValue
          // replaces the deprecated KeyValue.getValue. Cells whose value is not
          // a parseable float are skipped instead of failing the whole job with
          // a NumberFormatException.
          val values = result.rawCells().toSeq.flatMap { cell =>
            val bytes = org.apache.hadoop.hbase.CellUtil.cloneValue(cell)
            Try(new String(bytes).toFloat).toOption
          }
          (rowkey, values)
      }.map {
        // Stage 2: weighted score — `a` weights the first 7 values,
        // `b` weights the sum of all values.
        case (k, v) =>
          val score = a * v.take(7).sum + b * v.sum
          (k, score)
      }.foreach(println)
    } finally {
      // Always release Spark resources, even if the scan or scoring fails.
      sc.stop()
    }
  }
}