package doit20.sparksql

import java.util.Properties

import org.apache.spark.Partition
import org.apache.spark.sql.SparkSession

/**
 * Demonstrates partition control when reading a JDBC table with Spark SQL.
 *
 * Two strategies are shown:
 *   1. Numeric-column partitioning: Spark splits the [lowerBound, upperBound)
 *      range of a numeric column into N slices, one JDBC query per partition.
 *   2. Explicit predicate partitioning: one partition per caller-supplied
 *      WHERE clause, giving full control over how rows are distributed.
 *
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-04-10
 */
object Demo6 {

  def main(args: Array[String]): Unit = {

    // Local demo session; shuffle partitions lowered from the default 200
    // since the data set is tiny and this runs on a single machine.
    val spark = SparkSession.builder()
      .appName("JdbcPartitionControlDemo") // was "": an empty app name is useless in the Spark UI
      .config("spark.sql.shuffle.partitions", "10")
      .master("local")
      .getOrCreate()

    // JDBC connection properties for the MySQL source table.
    // NOTE(review): credentials are hard-coded for demo purposes only —
    // in real code load them from configuration or a secret store.
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")

    // Strategy 1: partition by numeric column `id`, range [1000, 2000), 4 partitions.
    // Note: the bounds only decide the partition stride — rows with id < 1000 or
    // id >= 2000 are NOT filtered out; they fall into the first/last partition.
    val df = spark.read.jdbc(
      "jdbc:mysql://localhost:3306/abc",
      "area_flat",
      "id",
      1000,
      2000,
      4,
      properties)

    // Print the partition metadata Spark generated (one entry per JDBC slice).
    val partitions: Array[Partition] = df.rdd.partitions
    partitions.foreach(println)

    // Strategy 2: one partition per explicit predicate. The `id is null`
    // clause is included so rows with a null id are not silently dropped.
    val frame = spark.read.jdbc(
      "jdbc:mysql://localhost:3306/abc",
      "area_flat",
      Array("id<1500 or id is null", "id>=1500 and id<2000", "id>=2000"),
      properties)
    frame.rdd.partitions.foreach(println)
    frame.show(100, truncate = false)

    spark.close()
  }
}
