package Hbase

import org.apache.spark.sql.SparkSession

/**
 * Work-in-progress Spark job intended to read rows from Hive and write them
 * into HBase.
 *
 * The entire `main` method below is commented out and incomplete; it is kept
 * as a sketch. Review notes are inlined at the relevant lines — the code
 * inside the comment block is unchanged.
 */
object SparkHbase {
/*  def main(args: Array[String]): Unit = {
    // Acting as an HBase client: the first step is to locate ZooKeeper.
    // NOTE(review): ZOOKEEPER_QUORUM is defined but never used — the HBase
    // connection/Configuration setup is missing entirely.
    val ZOOKEEPER_QUORUM = "192.168.27.221,192.168.27.222,192.168.27.223"
    // Read data from Hive to write into HBase; build the SparkSession
    // with Hive support enabled.
    val spark = SparkSession.builder()
      .appName("hive to hbase")
      .enableHiveSupport()
      .getOrCreate()
    // NOTE(review): the SQL has a stray trailing comma before FROM
    // ("...order_dow, from badou.orders") — this query would fail to parse
    // if the code were re-enabled.
    val rdd = spark.sql("select order_id, user_id,order_dow, from badou.orders limit 100").rdd
    // NOTE(review): `map` is a lazy transformation and no action follows, so
    // this closure would never run. An actual HBase write (e.g. via
    // saveAsNewAPIHadoopDataset) and the imports for Put/Bytes
    // (org.apache.hadoop.hbase.client.Put, org.apache.hadoop.hbase.util.Bytes)
    // are still missing.
    rdd.map{row =>
      val ordering_id = row(0).asInstanceOf[String]
      val user_id = row(1).asInstanceOf[String]
      val order_row = row(2).asInstanceOf[String]
      // NOTE(review): `p` is created but no columns are added and it is never
      // written; also prefer `val` over `var` once this is completed.
      var p = new Put(Bytes.toBytes(user_id))

    }


  }*/
}
