package doit20.sparksql

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-04-09
 * @desc 从外部服务系统中读取数据创建dataframe
 */
/**
 * Demo: read a hierarchical area table from MySQL via JDBC, flatten it with a
 * triple self-join in Spark SQL, and write the result back to MySQL.
 *
 * NOTE(review): connection URL and credentials are hard-coded for demo purposes;
 * externalize them (config/env) before any real use.
 */
object Demo5 {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    // Keep shuffle parallelism small for a local demo (Spark's default is 200).
    conf.set("spark.sql.shuffle.partitions","5")

    val spark = SparkSession.builder()
      // Was appName(""): an empty name makes the job unidentifiable in the Spark UI.
      .appName("Demo5-jdbc-area-flatten")
      .master("local")
      .config(conf)
      .getOrCreate()

    try {
      // Read the MySQL table `t_md_areas` into a DataFrame.
      val properties = new Properties()
      properties.setProperty("user","root")
      properties.setProperty("password","123456")
      val df = spark.read.jdbc("jdbc:mysql://localhost:3306/abc", "t_md_areas", properties)

      // Register a temp view so the self-join below can be expressed in SQL.
      df.createTempView("t")

      // Flatten the level-based hierarchy into one row per region:
      // province, city, region, Baidu (BD-09) longitude/latitude.
      // `region.level = 3` restricts the driving side to leaf (region) rows.
      val res = spark.sql(
        """
          | -- 把原来的层级数据结构，转成扁平结构：
          | -- 省,市,区,BD经度,BD维度
          |
          |select
          |
          |province.areaname  as province,
          |city.areaname   as city,
          |region.areaname  as region,
          |region.bd09_lng as lng,
          |region.bd09_lat as lat
          |from t region  join  t city  on region.parentid=city.id and region.level = 3
          |               join  t province  on city.parentid=province.id
          |
          |""".stripMargin)

      // Write the flattened result back to MySQL. Default SaveMode is
      // ErrorIfExists: this fails if `area_flat` already exists (kept as-is
      // so demo behavior is unchanged).
      res.write.jdbc("jdbc:mysql://localhost:3306/abc","area_flat",properties)
    } finally {
      // Release the SparkSession even if the read/transform/write fails;
      // previously close() was skipped on any exception above it.
      spark.close()
    }
  }
}
