package test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.slf4j.LoggerFactory

import com.ipinyou.hbase.model.MUser
import com.ipinyou.hbase.service.impl.HBaseConnectionServiceImpl

/**
 * Spark driver that reads (id, weight) pairs from a TSV file, merges each
 * weight with the weight already stored in HBase for that id (if any), and
 * writes the combined result back to HBase.
 */
object t2 {

  val log = LoggerFactory.getLogger(this.getClass)

  // HBase service lives on the object so each executor JVM gets its own
  // instance on first access instead of serializing a driver-side connection.
  val hbaseServerce = new HBaseConnectionServiceImpl()

  /** Stub — not yet implemented. */
  def write2base(): Unit = {
  }

  /**
   * For each (id, weight) pair, looks up the existing row in HBase.
   * If a row exists, returns (storedId, storedWeight + weight);
   * otherwise passes the pair through unchanged.
   *
   * Runs once per partition via `mapPartitions`, so the MUser template and
   * the HBase lookups are shared across all rows of the partition.
   */
  def getFromHbase(iter: Iterator[(String, Float)]): Iterator[(String, Float)] = {
    val muser = new MUser()
    // Lazily map the iterator instead of accumulating into a var List:
    // avoids materializing the whole partition in memory at once.
    iter.map { case (id, sweight) =>
      val rlist = hbaseServerce.get(muser, id)
      if (!rlist.isEmpty()) {
        val ruser = rlist.get(0)
        // Row found: add the stored weight to the incoming one.
        (ruser.getId, ruser.getContent.toFloat + sweight)
      } else {
        // No stored row: keep the incoming pair as-is.
        (id, sweight)
      }
    }
  }

  /**
   * Persists each (rowkey, weight) pair of a partition to HBase.
   *
   * NOTE(review): a single MUser instance is mutated and re-inserted for
   * every row — safe only if `insert` copies/flushes synchronously; confirm
   * against HBaseConnectionServiceImpl.
   */
  def save2Hbase(iter: Iterator[(String, Float)]): Unit = {
    val muser = new MUser()
    iter.foreach { case (rowkey, weight) =>
      muser.setId(rowkey)
      muser.setContent(weight.toString)
      hbaseServerce.insert(muser)
    }
  }

  def main(args: Array[String]): Unit = {

    val predictPyids = "file:/Users/miaoyujia/tmp/pre_pyids"

    val sc = new SparkContext(new SparkConf().setAppName("offline").setMaster("local"))
    try {
      // Counts input lines across all executors; incremented inside the map
      // below (previously it was never incremented and always logged 0).
      val totalRecords = sc.accumulator(0)

      // Read once, no cache: the RDD feeds a single action, so caching
      // would only waste memory.
      val impRdd = sc.textFile(predictPyids)

      val zrdd = impRdd.map { line =>
        totalRecords += 1
        // Expected TSV layout: column 0 = rowkey, column 3 = weight.
        // Assumes every line has >= 4 tab-separated fields — TODO confirm.
        val fields = line.split("\t")
        (fields(0), fields(3).toFloat)
      }.mapPartitions(getFromHbase)

      zrdd.foreachPartition(save2Hbase)

      // Accumulator value is only reliable after the action above completed.
      log.info("totalRecords : " + totalRecords.value)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}