package com.guchenbo.spark.sql

/**
 * @author guchenbo
 * @date 2022/6/2
 */
/**
 * Demo: reading a MySQL table through the Spark JDBC data source and
 * inspecting how rows are distributed across partitions.
 *
 * Without `partitionColumn`/`lowerBound`/`upperBound`/`numPartitions`,
 * the JDBC source produces a single partition; the commented options
 * below show how to enable parallel reads.
 */
object JdbcDataSourceDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkUtils.sparkSession("Spark jdbc")

    // Sanity check: print partition layout of a plain in-memory RDD.
    val rdd = spark.sparkContext.makeRDD(List(1, 2, 3, 4, 5))
    SparkUtils.printPart(rdd)

    println("read table")
    // Wrapped as a subquery so arbitrary SQL can be pushed down via `dbtable`.
    val sql = "select * from model_manager.model_info"

    // SECURITY NOTE(review): credentials are hardcoded; move them to a
    // config file / environment variables before this leaves demo code.
    val df = spark.read
      .format("jdbc")
      .option("url", "jdbc:mysql://10.57.16.13:3306/model_manager_zhongyin?useSSL=false")
      .option("user", "model_paas_manager")
      .option("password", "Liu_0123456789")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("dbtable", s"($sql) sub")
//      .option("numPartitions", 5)
//      .option("fetchsize", 1000)
//      .option("partitionColumn", "gmt_create")
//      .option("lowerBound", "2020-08-18 14:13:31")
//      .option("upperBound", "2020-08-24 17:32:46")
      .load()
    SparkUtils.printPart(df.rdd)

    // BUG FIX: RDDs are immutable — `repartition` returns a NEW RDD.
    // The original discarded the result and re-printed the unchanged
    // `df.rdd`, so the second printout was identical to the first.
    val repartitioned = df.rdd.repartition(4)
    SparkUtils.printPart(repartitioned)

    // Release cluster resources when the demo is done.
    spark.stop()
  }
}
