package cn.lecosa.spark.hbase

import org.apache.spark.SparkConf
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext
import org.apache.spark.sql._
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.SQLContext
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.{ HBaseConfiguration, HColumnDescriptor, HTableDescriptor }
import org.apache.hadoop.hbase.client.{ HBaseAdmin, HTable, Put }
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql._
import org.apache.spark.sql.SQLContext
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark._
import org.apache.spark.rdd.NewHadoopRDD
import org.apache.hadoop.hbase.{ HBaseConfiguration, HTableDescriptor }
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.spark.sql.hive.HiveContext


import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.{ TableName, CellUtil, HBaseConfiguration }
import org.apache.hadoop.hbase.client.{ ResultScanner, HTable, Result, Scan }
import org.apache.hadoop.hbase.filter.{ CompareFilter, Filter, RegexStringComparator, RowFilter }
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.Base64
import org.apache.spark.rdd.RDD
import org.apache.spark.{ SparkConf, SparkContext }
import org.slf4j.LoggerFactory

object queryHbase {
  val log = LoggerFactory.getLogger(this.getClass)

  /**
   * Build an HBase client [[Configuration]] for the given table.
   *
   * @param tableName  table to read; stored under `TableInputFormat.INPUT_TABLE`
   * @param serverHost ZooKeeper quorum (comma-separated host list)
   * @param serverPort ZooKeeper client port
   * @return configured HBase `Configuration`
   */
  def getHbaseConfig(tableName: String, serverHost: String, serverPort: String): Configuration = {
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.set("hbase.zookeeper.quorum", serverHost)
    hbaseConf.set("hbase.zookeeper.property.clientPort", serverPort)
    // Long (1h) timeouts: regex row filters force server-side full scans,
    // which can easily exceed the default RPC/scanner timeouts.
    hbaseConf.set("hbase.rpc.timeout", "3600000")
    hbaseConf.set("hbase.client.scanner.timeout.period", "3600000")
    hbaseConf.set(TableInputFormat.INPUT_TABLE, tableName)
    hbaseConf
  }

  /**
   * Query an HBase table, keeping only rows whose row key matches a regex.
   *
   * The returned RDD is lazy: no scan is executed until the caller runs an
   * action on it.
   *
   * @param sc          active SparkContext
   * @param tableName   HBase table name
   * @param serverHost  ZooKeeper quorum
   * @param serverPort  ZooKeeper client port
   * @param prop        unused; kept for interface compatibility
   * @param rowKeyRegex Java regex applied to row keys via `RegexStringComparator`
   * @param partionNum  unused; kept for interface compatibility
   * @return RDD of (row key, Result) pairs for matching rows
   */
  def queryHbase(sc: SparkContext, tableName: String, serverHost: String, serverPort: String, prop: String, rowKeyRegex: String, partionNum: Int): RDD[(ImmutableBytesWritable, Result)] = {
    val hbaseConf = getHbaseConfig(tableName, serverHost, serverPort)
    log.info("===========successful!")
    val scan: Scan = new Scan()
    // Server-side filter: only rows whose key matches rowKeyRegex are returned.
    val rowFilter: Filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(rowKeyRegex))
    scan.setFilter(rowFilter)
    hbaseConf.set(TableInputFormat.SCAN, convertScanToString(scan))
    val hbaseRDD = sc.newAPIHadoopRDD(
      hbaseConf,
      classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])
    // FIX: removed the debug println(hbaseRDD.count()) / println(hbaseRDD.take(1)).
    // Each was a separate Spark action that forced a full filtered table scan
    // before the caller even used the result, and take(1) printed only the
    // Array's default toString. Callers decide when to materialize the RDD.
    hbaseRDD
  }

  /**
   * Serialize a Scan into the Base64 protobuf string that
   * `TableInputFormat.SCAN` expects as a configuration value.
   *
   * @param scan the scan to serialize
   * @return Base64-encoded protobuf representation of the scan
   */
  def convertScanToString(scan: Scan): String = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }

  /** Demo entry point: scans table `tab_x_1` for all-digit row keys containing "1301". */
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName(s"${this.getClass.getName}").setMaster("local[2]")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)
    val tableName = "tab_x_1"
    val serverHost = "park01,192.168.226.131" // ZooKeeper quorum: hostname + ip
    val serverPort = "2181"
    val prop = "prop"
    // FIX: the original pattern "^*\\d1301\\d*$" quantified the ^ anchor and
    // required exactly one digit before the literal "1301". This matches any
    // digit-only row key containing "1301", which is the apparent intent.
    val rowKeyRegex = "^\\d*1301\\d*$"
    val partionNum = 1
    val hbaseRDD: RDD[(ImmutableBytesWritable, Result)] = queryHbase(sc, tableName, serverHost, serverPort, prop, rowKeyRegex, partionNum)

    // Run one action so the demo actually executes the scan (the RDD is lazy).
    println(hbaseRDD.count())
    println("done1")
    sc.stop()

  }

}
