package com.sugon.bt

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Spark job that copies one or more ClickHouse tables into Hive (stored as
  * ORC, append mode) by reading each source table over JDBC.
  */
object ClickHouseToHivePlus {

  /**
    * Entry point. Reads each listed table from ClickHouse over JDBC and
    * appends it into a same-named Hive table in ORC format.
    *
    * args(0) – source ClickHouse database name (required)
    * args(1) – comma-separated list of source table names (required)
    * args(2) – target Hive database name (optional, defaults to "ysk")
    * args(3) – ClickHouse JDBC URL prefix (optional, defaults to
    *           "jdbc:clickhouse://slave14:8123/")
    *
    * @throws IllegalArgumentException if fewer than 2 arguments are supplied
    */
  def main(args: Array[String]): Unit = {

    // Bug fix: the original checked `args.length < 3` even though only two
    // arguments are used and the error message documents exactly two.
    if (args.length < 2) {
      throw new IllegalArgumentException("输入参数错误,参数依次为 '源库库名 源表列表'")
    }
    // 输入参数
    val sourceDbName = args(0)
    val sourceTables = args(1)
    // Generalized: target DB and JDBC URL were hard-coded; the old values
    // remain the defaults so existing invocations behave identically.
    val targetDbName  = if (args.length > 2) args(2) else "ysk"
    val jdbcUrlPrefix = if (args.length > 3) args(3) else "jdbc:clickhouse://slave14:8123/"

    // Robustness: tolerate stray whitespace / trailing commas in the list.
    val tableNames: Array[String] =
      sourceTables.split(",").map(_.trim).filter(_.nonEmpty)

    val spark: SparkSession = SparkSession.builder().appName("ClickHouseToHive")
      //      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      for (tableName <- tableNames) {
        // Read the source table via the ClickHouse JDBC driver.
        val df: DataFrame = spark.read
          .format("jdbc")
          .option("driver", "ru.yandex.clickhouse.ClickHouseDriver")
          .option("url", jdbcUrlPrefix + sourceDbName)
          .option("dbtable", tableName)
          .load()

        // Append into the same-named Hive table in the target database.
        df.write.format("orc").mode("append").saveAsTable(s"$targetDbName.$tableName")
      }
    } finally {
      // Leak fix: release the session even if one of the copies fails.
      spark.close()
    }
  }

}
