package com.qing.spark

//import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants}
//import org.apache.hadoop.mapred.JobConf
//
//import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by wuliao on 2018/3/19.
  *
  * Draft Spark job intended to export data from an HBase table and write the
  * result to HDFS as text. The entire implementation is currently commented
  * out, so running this program is a no-op.
  *
  * NOTE(review): before re-enabling the commented code, be aware that it
  * references `TableOutputFormat` / `TableInputFormat` without imports, and
  * mixes the old `mapred` `JobConf` API with the new-API `newAPIHadoopRDD`
  * call (the configured `jobConf` is never actually used — plain `conf` is
  * passed instead). This would need to be reconciled to one API.
  */
object ExportDataFromHBase {

  // Entry point. Currently does nothing: all of the HBase read / HDFS write
  // logic below is commented out (kept as a reference for a future revival).
  def main(args: Array[String]): Unit = {

    // HBase connection settings (cluster addresses hard-coded; the 127.0.0.1
    // variants below were presumably for local testing).
//    val conf = HBaseConfiguration.create()
//    conf.set("hbase.master", "impala01:60000")
//    conf.set("hbase.zookeeper.quorum", "impala02,impala03,impala04")
    //    conf.set("hbase.master", "127.0.0.1:60000")
    //    conf.set("hbase.zookeeper.quorum", "127.0.0.1:2181")

//    val sparkConf = new SparkConf()
//      .setAppName("ExportDataFromHBase")
//      .setMaster("local")
//    val sc = new SparkContext(sparkConf)

    // Create the JobConf and set the output format and source table name.
    // NOTE(review): this JobConf is never passed to the RDD call below.
//    val jobConf = new JobConf(conf, this.getClass)
//    jobConf.setOutputFormat(classOf[TableOutputFormat])
//    jobConf.set(TableInputFormat.INPUT_TABLE, "hour_count")
//
//    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
//      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
//      classOf[org.apache.hadoop.hbase.client.Result])

    // Variant 1: extract (rowKey, packageSize, dataSize) tuples from the
    // "hour_count" table and save them as text on HDFS.
//    hBaseRDD.map { case (_, result) => {
    //      // Get the row key.
    //      val key = Bytes.toString(result.getRow)
    //      // Read column values by column family and qualifier.
    //      val packageSize = Bytes.toLong(result.getValue("data".getBytes, "packageSize".getBytes))
    //      val dataSize = Bytes.toLong(result.getValue("data".getBytes, "dataSize".getBytes))
    //      (key, packageSize, dataSize)
    //    }
    //    }.saveAsTextFile("hdfs://175.102.18.112:8020/sflow/hour_count")

    // Variant 2 (debug): print each row's "cf:name" / "cf:age" columns.
    //    hBaseRDD.foreach { case (_, result) => {
    //      // Get the row key.
    //      val key = Bytes.toString(result.getRow)
    //      // Read column values by column family and qualifier.
    //      val name = Bytes.toString(result.getValue("cf".getBytes, "name".getBytes))
    //      val age = Bytes.toInt(result.getValue("cf".getBytes, "age".getBytes))
    //      println("Row key:" + key + " Name:" + name + " Age:" + age)
    //    }
    //    }

    // Sanity check: count the rows read from HBase.
//    val count = hBaseRDD.count()
//    println(count)

  }
}
