package cn.ipanel.bigdata.example

import cn.ipanel.bigdata.boot.date.{Date, Minute}
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Example of operating on HBase through Phoenix from a Spark job.
 *
 * Reads rows from `bigdata.t_user` via the Phoenix JDBC driver, then upserts
 * two demo rows back through the phoenix-spark connector.
 * (Verified working on the cluster --- lzz)
 */
object SparkSQLOnPhoenix {

  /** Zookeeper quorum shared by the JDBC reader and the phoenix-spark writer. */
  private val ZkQuorum = "192.168.35.113,192.168.35.115,192.168.35.118:2181"
  // private val ZkQuorum = "master,slave1,slave2:2181"

  /** Phoenix JDBC connection URL derived from the quorum. */
  private val PhoenixJdbcUrl = s"jdbc:phoenix:$ZkQuorum"

  def main(args: Array[String]): Unit = {
    // NOTE(review): setMaster("local[2]") pins local mode, which conflicts with
    // the original "submit to cluster" intent — drop it (or override with
    // --master on spark-submit) when running on a real cluster.
    val sparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("SparkSQLOnPhoenix")

    val sparkContext = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sparkContext)

    try {
      // Read: Phoenix accepts a parenthesised sub-query aliased as a table
      // for the JDBC "dbtable" option.
      val sql =
        s"""
           |(
           |select f_id,f_name,f_sex,f_age
           |from bigdata.t_user
           |) as temp
           |""".stripMargin
      val df = loadDataFromPhoenix(sqlContext, sql)
      // show() returns Unit — wrapping it in println would just print "()".
      df.show()

      // Write: names embed the current minute so repeated runs produce
      // visibly different data.
      val df2 = sqlContext.createDataFrame(Seq(
        (3, "wangwu_" + Date.asMinute.toSimpleMinute, 1, 30),
        (4, "zhaoliu_" + Date.asMinute.toSimpleMinute, 0, 25)
      )).toDF("f_id", "f_name", "f_sex", "f_age")

      df2.show()
      saveDataFrameToPhoenixNew(df2, "bigdata.t_user")
    } finally {
      // Always release the Spark context, even if a read/write fails.
      sparkContext.stop()
    }
  }

  /**
   * Loads a DataFrame from Phoenix over JDBC.
   *
   * @param sqlContext active SQLContext
   * @param table      a table name, or a parenthesised sub-query with an alias
   * @param url        Phoenix JDBC URL; defaults to the shared cluster quorum
   * @return the loaded DataFrame
   */
  def loadDataFromPhoenix(sqlContext: SQLContext,
                          table: String,
                          url: String = PhoenixJdbcUrl): DataFrame = {
    sqlContext.read.format("jdbc")
      .option("driver", "org.apache.phoenix.jdbc.PhoenixDriver")
      .option("dbtable", table)
      // Generous timeouts for long-running HBase scans.
      .option("hbase.rpc.timeout", "600000")
      .option("hbase.client.scanner.timeout.period", "600000")
      .option("url", url)
      .load
  }

  /**
   * Upserts a DataFrame into a Phoenix table via the phoenix-spark connector.
   *
   * @param dataFrame data to write
   * @param table     target Phoenix table name (e.g. "bigdata.t_user")
   */
  def saveDataFrameToPhoenixNew(dataFrame: DataFrame, table: String): Unit = {
    dataFrame.write.format("org.apache.phoenix.spark")
      // phoenix-spark only supports Overwrite; it executes an UPSERT per row.
      .mode(SaveMode.Overwrite)
      // zkUrl must be the bare Zookeeper quorum ("host1,host2:port") — a
      // "jdbc:phoenix:" prefixed URL is not a valid value for this option.
      .options(Map("table" -> table, "zkUrl" -> ZkQuorum))
      .save()
  }

}
