package com.archgeek.spark.examples.v20201223

import org.apache.avro.Schema
import org.apache.avro.generic.GenericData
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.spark.HBaseContext
import org.apache.hadoop.hbase.spark.datasources.HBaseTableCatalog
import org.apache.hadoop.hbase.spark.example.datasources.AvroHBaseRecord
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo of reading an HBase table with Spark SQL via the hbase-spark connector.
 *
 * Created by pizhihui on 2020-12-23
 */
object HbaseSparkDemo {

  /**
   * Reads an HBase table through the hbase-spark connector two ways:
   * via a reusable `withCatalog` helper plus a temp view, and via a
   * direct `DataFrameReader.option` configuration. Results are printed
   * with `show()` so the queries actually execute.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    val sparkSession = SparkSession.builder().appName("test").getOrCreate()
    val sqlContext = sparkSession.sqlContext
    val sc = sparkSession.sparkContext

    val config = HBaseConfiguration.create()
    config.set("hbase.zookeeper.quorum", "1.hadoop3.com")

    // Constructing the HBaseContext registers it in a JVM-wide cache that the
    // "org.apache.hadoop.hbase.spark" data source looks up, so it must be
    // created before the reads below even though it is never referenced here.
    val hbaseContext = new HBaseContext(sc, config)

    // Catalog mapping DataFrame columns to HBase row key / column families.
    // `val` rather than `def`: the string is a constant, so there is no
    // reason to rebuild it on every access.
    val catalog = s"""{
                     |"table":{"namespace":"default", "name":"table1"},
                     |"rowkey":"key",
                     |"columns":{
                     |"col0":{"cf":"rowkey", "col":"key", "type":"string"},
                     |"col1":{"cf":"cf1", "col":"col1", "type":"boolean"},
                     |"col2":{"cf":"cf2", "col":"col2", "type":"double"},
                     |"col3":{"cf":"cf3", "col":"col3", "type":"float"},
                     |"col4":{"cf":"cf4", "col":"col4", "type":"int"},
                     |"col5":{"cf":"cf5", "col":"col5", "type":"bigint"},
                     |"col6":{"cf":"cf6", "col":"col6", "type":"smallint"},
                     |"col7":{"cf":"cf7", "col":"col7", "type":"string"},
                     |"col8":{"cf":"cf8", "col":"col8", "type":"tinyint"}
                     |}
                     |}""".stripMargin

    /** Loads a DataFrame from HBase using the given table catalog. */
    def withCatalog(cat: String): DataFrame = {
      sqlContext
        .read
        .options(Map(HBaseTableCatalog.tableCatalog -> cat))
        .format("org.apache.hadoop.hbase.spark")
        .load()
    }

    val df = withCatalog(catalog)
    df.createOrReplaceTempView("table1")

    // Spark is lazy: without an action such as show() the query is never
    // executed, so the original demo produced no output at all.
    val resDF = sqlContext.sql("select count(col1) from table1")
    resDF.show()

    // Alternative read path: the same table, with the catalog passed directly
    // to the reader instead of through the helper above.
    val dataSet = sparkSession.read
      .format("org.apache.hadoop.hbase.spark")
      .option(HBaseTableCatalog.tableCatalog, catalog)
      .load()
    dataSet.createOrReplaceTempView("dataSet")

    // Query the HBase data through a plain SQL statement.
    val sqlRow = sqlContext.sql("select * from dataSet")
    sqlRow.show()

    // Release cluster resources; the original leaked the session.
    sparkSession.stop()
  }

}
