package com.study.spark.scala.phoenix

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Reads data from MySQL and stores it into HBase via Apache Phoenix.
 *
 * @author stephen.shen
 * @email shenzhaoxiang@gmail.com
 * @date 2020-02-19 15:19
 */
object FromMySQLToPhoenixDemo {

  /**
   * Entry point: reads the `idc_online_data` table from MySQL over JDBC,
   * drops the `guid` column, and upserts the result into the Phoenix/HBase
   * table of the same name.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    // Suppress noisy Spark INFO logging on the console.
    Logger.getLogger("org.apache.spark")
      .setLevel(Level.WARN)

    val INPUT_TABLE = "idc_online_data"
    val OUTPUT_TABLE = "idc_online_data"

    val spark = SparkSession.builder()
      .appName("phoenix-demo")
      .master("local")
      .getOrCreate()

    try {
      // Read the source rows from MySQL.
      // NOTE(review): credentials are hard-coded for this demo; move them to
      // configuration (e.g. spark-submit --conf or a properties file) before
      // any real deployment.
      val jdbcDF = spark.read
        .format("jdbc")
        .option("url", "jdbc:mysql://192.168.8.126:3306/idc-platform?useSSL=false&useUnicode=true&characterEncoding=UTF-8")
        .option("dbtable", INPUT_TABLE)
        .option("user", "open")
        .option("password", "open123")
        .load()

      jdbcDF.printSchema()

      // Drop the column that should not be persisted to Phoenix.
      val df = jdbcDF.drop("guid")
      df.show(10)

      // Write to Phoenix. `zkUrl` takes only the ZooKeeper quorum
      // (host[,host...][:port]); the "jdbc:phoenix:" prefix belongs to JDBC
      // connection URLs, not to this option, and breaks quorum parsing.
      // The phoenix-spark connector requires SaveMode.Overwrite, but it
      // performs UPSERTs — existing rows are not truncated.
      df.write
        .format("org.apache.phoenix.spark")
        .option("table", OUTPUT_TABLE)
        .option("zkUrl", "node202,node203,node204")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // Always release the SparkSession, even when the job fails mid-way.
      spark.stop()
    }
  }
}
