package test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.slf4j.LoggerFactory

import com.ipinyou.utils.HbaseUtils
import org.apache.hadoop.hbase.client.HTable
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.TableName

object local {
  // NOTE(review): Scala convention is UpperCamelCase for object names
  // ("Local"); kept lowercase to avoid breaking external references.

  val log = LoggerFactory.getLogger(this.getClass)

  /** Placeholder for the HBase write path.
    *
    * The intended implementation is sketched in the commented-out reference
    * block inside [[main]] (HTable + Put with a manual write buffer).
    * TODO(review): implement here or delete.
    */
  def write2base(): Unit = {

  }

  /** Counts the records of a local "predicted pyids" file with a
    * local-mode Spark job and logs the total.
    *
    * @param args optional override: args(0) is the input path; defaults to
    *             the hard-coded developer-machine test file.
    */
  def main(args: Array[String]): Unit = {
    // Hard-coded local test path; can now be overridden from the CLI
    // (previously `args` was ignored entirely).
    val defaultInput = "file:/Users/miaoyujia/tmp/pre_pyids"
    val predictPyids = if (args.nonEmpty) args(0) else defaultInput

    val sc = new SparkContext(new SparkConf().setAppName("offline").setMaster("local"))
    try {
      // count() is an action whose exact result lands on the driver, so the
      // deprecated `sc.accumulator(0)` incremented inside foreach — and the
      // `.cache()` on an RDD that was consumed only once — are unnecessary.
      // The per-line split/rowkey extraction in the original foreach had no
      // live effect and is folded into the reference snippet below.
      val totalRecords = sc.textFile(predictPyids).count()

      // HBase write reference preserved from the original, for write2base():
      //   val hbaseConf = HBaseConfiguration.create()
      //   val put = new Put(Bytes.toBytes(rowkey))             // rowkey = line.split("\t")(0)
      //   put.add("f1".getBytes, "w".getBytes, Bytes.toBytes(value))
      //   val statTable = new HTable(hbaseConf, TableName.valueOf("mprofile"))
      //   statTable.setAutoFlush(false, false)
      //   statTable.setWriteBufferSize(3 * 1024 * 1024)        // 3 MB write buffer
      //   statTable.put(put)
      //   statTable.flushCommits()

      log.info("totalRecords : " + totalRecords.toString())
    } finally {
      // The original never stopped the context; always release it so local
      // executor resources are freed even when the job throws.
      sc.stop()
    }
  }

}