package org.qnit.dw

import org.apache.spark.sql.{SaveMode, SparkSession}
import org.qnit.util.SparkUtil

object OdsApp {

  /** Reads one table from the local MySQL `qnit` database over JDBC.
    *
    * @param spark active session used to build the JDBC reader
    * @param table MySQL table name to load (passed straight to `dbtable`)
    * @return the table contents as a DataFrame (lazy; nothing is fetched until an action runs)
    */
  def readMysqlDB(spark: SparkSession, table: String): org.apache.spark.sql.DataFrame = {
    spark.read.format("jdbc")
      .option("url", "jdbc:mysql://localhost:3306/qnit?useSSL=false")
      // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x class name;
      // Connector/J 8+ renamed it to com.mysql.cj.jdbc.Driver — confirm the driver
      // version bundled on the cluster before upgrading.
      .option("driver", "com.mysql.jdbc.Driver")
      .option("dbtable", table)
      // SECURITY: credentials are hard-coded in source. Move them to a config file,
      // environment variables, or a secret store before this leaves development.
      .option("user", "root")
      .option("password", "zxy1997")
      .load()
  }

  /** Snapshot-loads the ODS layer: each MySQL source table is overwritten into the
    * Hive table `ods.ods_<table>` as a single parquet file.
    *
    * The three pipelines were previously copy-pasted verbatim; they differ only in
    * the table name, so they are driven from one list instead.
    */
  def writeToHive(spark: SparkSession): Unit = {
    val tables = Seq("expert_logbook", "expert_logbook_content", "user_info")
    tables.foreach { table =>
      readMysqlDB(spark, table)
        .repartition(1) // one output file per table; source volume is small
        .write.mode(SaveMode.Overwrite)
        .format("parquet")
        .saveAsTable(s"ods.ods_$table")
    }
  }

  /** Entry point: expects a single environment argument (validated by SparkUtil). */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message when no argument is given; previously
    // args(0) threw ArrayIndexOutOfBoundsException on an empty argv.
    if (args.isEmpty) {
      System.err.println("usage: OdsApp <env>")
      System.exit(1)
    }
    val env = args(0)
    println(s"env=$env")
    if (!SparkUtil.verifyEnv(env))
      System.exit(1)
    val spark: SparkSession = SparkUtil.initSparkSession(env, "OdsApp")

    // NOTE(review): the ODS load is currently disabled; re-enable deliberately.
//    writeToHive(spark)

    spark.stop()
  }

}
