package com.study.spark.scala.phoenix

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SaveMode, SparkSession}

/**
 * cdh Phoenix demo
 *
 * @author stephen.shen
 * @email shenzhaoxiang@gmail.com
 * @date 2020-02-19 11:09
 */
object PhoenixDemo {

  def main(args: Array[String]): Unit = {
    // Suppress noisy Spark INFO logging on the console.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    val spark = SparkSession
      .builder()
      .appName("phoenix-demo")
      .master("local")
      .getOrCreate()

    //    DROP TABLE "tbl_user";
    //    CREATE TABLE "tbl_user" ("id" VARCHAR PRIMARY KEY, "info"."name" VARCHAR, "info"."age" INTEGER) COLUMN_ENCODED_BYTES=0;
    //    UPSERT INTO "tbl_user" values('row001','tom',20);
    //    UPSERT INTO "tbl_user" values('row002','jack',21);
    //    UPSERT INTO "tbl_user" values('row003','lucy',22);
    //    SELECT * FROM "tbl_user";

    // Phoenix is case-sensitive for quoted identifiers, hence the embedded quotes.
    val TABLE = "\"tbl_user\""
    // The phoenix-spark connector expects a bare ZooKeeper quorum here
    // (host1,host2,host3[:port]); Phoenix prepends "jdbc:phoenix:" itself,
    // so the quorum must NOT carry the JDBC prefix.
    val ZK_URL = "node202,node203,node204:2181"

    // Option 1: read from Phoenix via the phoenix-spark DataSource.
    val df = spark.read
      .format("org.apache.phoenix.spark")
      .option("table", TABLE)
      .option("zkUrl", ZK_URL)
      .load()

    // Option 2: read from Phoenix through the generic JDBC DataSource.
    //    val df = spark.read
    //      .format("jdbc")
    //      .option("driver", "org.apache.phoenix.jdbc.PhoenixDriver")
    //      .option("url", "jdbc:phoenix:node202:2181")
    //      .option("dbtable", TABLE)
    //      .load()

    df.createOrReplaceTempView("user")

    df.printSchema()
    df.show()

    // Write path: build a small DataFrame and upsert it into the same table.
    val dataSet = List(Row("row003", "name-3", 30), Row("row004", "name-4", 30), Row("row005", "name-5", 30))

    //    val schema = StructType(
    //      Seq(StructField("\"id\"", IntegerType, nullable = false),
    //        StructField("\"info\".\"name\"", StringType),
    //        StructField("\"info\".\"age\"", IntegerType)))

    // Column names must match the Phoenix table's (quoted) column names;
    // the column-family prefix is not required when writing.
    val schema = StructType(
      Seq(StructField("\"id\"", StringType, nullable = false),
        StructField("\"name\"", StringType),
        StructField("\"age\"", IntegerType)))

    val rowRDD = spark.sparkContext.parallelize(dataSet)

    // Use SparkSession directly; spark.sqlContext is a legacy indirection.
    val outputDF = spark.createDataFrame(rowRDD, schema)

    // NOTE: the phoenix-spark connector only accepts SaveMode.Overwrite,
    // and it performs UPSERTs (it does not truncate the table first).
    outputDF.write
      .format("org.apache.phoenix.spark")
      .option("table", TABLE)
      .option("zkUrl", ZK_URL)
      .mode(SaveMode.Overwrite)
      .save()

    // Release the local Spark context and its resources.
    spark.stop()
  }
}
