package com.ipinyou.hb2hf

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import com.ipinyou.hbase.model.MUser
import com.ipinyou.hbase.service.impl.HBaseConnectionServiceImpl
import org.slf4j.LoggerFactory
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.hbase.client.Scan
import java.io.ByteArrayOutputStream
import java.io.DataOutputStream
import org.apache.hadoop.hbase.util.Base64

object hb2hb {

  val a = 1.0
  val b = 2.0

  // Run-mode flag: "0" = test, "1" = production (overridden by args(0) in main).
  var flag = "0"

  // Shared HBase write service; used by every partition in save2HbaseByBatch.
  val hbaseServerce = new HBaseConnectionServiceImpl();

  // log
  val log = LoggerFactory.getLogger(this.getClass)

  /**
   * Maps a pyid to one of 1050 buckets and formats it as a zero-padded
   * 4-digit string ("0000".."1049"), used as a rowkey prefix.
   *
   * The hash is widened to Long before Math.abs: Math.abs(Int.MinValue)
   * stays negative on Int, which would have produced a negative bucket
   * and a malformed key. All other inputs map exactly as before.
   */
  def getPyPartiton(pyid: String): String = {
    val py_hash = pyid.hashCode()
    // abs on Long is safe for Int.MinValue (2147483648L); result fits in Int after % 1050.
    val py_partition = (Math.abs(py_hash.toLong) % 1050).toInt
    val partition = "%04d".format(py_partition)
    partition
  }

  /**
   * Persists one RDD partition's (rowkey, weight) pairs to HBase as MUser
   * records in a single batched insert.
   *
   * @param iter the partition's (rowkey, weight) tuples
   */
  def save2HbaseByBatch(iter: Iterator[(String, Float)]) {
    val musers = new ArrayBuffer[MUser]()
    while (iter.hasNext) {
      val muser = new MUser()
      val arr = iter.next()
      val rowkey = arr._1
      val weight = arr._2
      muser.setId(rowkey)
      muser.setContent(weight.toString)
      musers.append(muser)
    }
    // Avoid issuing an insert RPC for empty partitions.
    if (musers.nonEmpty) {
      hbaseServerce.insert(musers: _*)
    }
  }

  /**
   * Entry point: scans the "mprofile" HBase table, re-keys every cell as
   * "partition|tag|pyid" with the cell value parsed as a Float weight,
   * and batch-writes the result back through the HBase service.
   *
   * @param args optional; args(0) sets the run-mode flag ("0" test, "1" production)
   */
  def main(args: Array[String]): Unit = {

    if (args.length > 0) {
      flag = args(0)
    }

    val config = new SparkConf().setAppName("mobile.hb2hb")
    val sc = new SparkContext(config)

    val conf = HBaseConfiguration.create()
    conf.set(TableInputFormat.INPUT_TABLE, "mprofile")
//    conf.set(TableInputFormat.SCAN_BATCHSIZE, "10")

    // Full-table scan of "mprofile" as (rowkey, Result) pairs.
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])

    // Re-key every cell of every row as "partition|tag|pyid" -> weight.
    // NOTE(review): assumes source rowkeys have at least 3 "|"-separated fields
    // (prefix|tag|pyid) and that every cell value parses as a Float — a
    // malformed row or value will fail the task; confirm upstream guarantees.
    val resRdd = hBaseRDD.flatMap {
      case (_, result) =>
        val kvs = result.raw
        val rowkey = new String(result.getRow)
        val arr = rowkey.split("""\|""", -1)
        val tag = arr(1)
        val pyid = arr(2)

        val py_partition = getPyPartiton(pyid)
        for (kv <- kvs) yield (List(py_partition, tag, pyid).mkString("|"), new String(kv.getValue).toFloat)
    }

    resRdd.foreachPartition(save2HbaseByBatch)

  }
}