package com.galeno.sparksql

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.util.Properties

/**
 * @Title: SparkSql07
 * @Description: Demo of reading a MySQL table via partitioned JDBC reads,
 *               registering temp views, and joining them with Spark SQL.
 * @author galeno
 * @date 2021/9/4 22:10
 */
object SparkSql07 {

  /**
   * Reads the MySQL `area` table as a partitioned JDBC DataFrame.
   *
   * The read is split on the `id` column across the range [1, 500000)
   * into 2 partitions so Spark issues parallel JDBC queries.
   *
   * @param spark active SparkSession used to perform the read
   * @param props JDBC connection properties (user/password)
   * @return DataFrame backed by the `area` table
   */
  private def readAreaTable(spark: SparkSession, props: Properties): DataFrame =
    spark.read.jdbc(
      "jdbc:mysql://localhost:3306/spark",
      "area",
      "id",     // partition column
      1,        // lower bound of the partition range
      500000,   // upper bound of the partition range
      2,        // number of partitions (parallel JDBC connections)
      props
    )

  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging.
    Logger.getLogger("org.apache").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .master("local")
      .appName("SparkSql07") // meaningful name for the Spark UI (was empty)
      .getOrCreate()

    try {
      // JDBC credentials for the MySQL `spark` database.
      // NOTE(review): credentials are hard-coded — fine for a demo, but
      // should come from configuration in real code.
      val properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "root")

      // Register four views over the same table (one JDBC mapping each),
      // mirroring the original demo's df1..df4 setup.
      val frames: Map[String, DataFrame] =
        Seq("df1", "df2", "df3", "df4")
          .map(name => name -> readAreaTable(spark, properties))
          .toMap
      frames.foreach { case (name, df) => df.createTempView(name) }

      frames("df1").printSchema()
      frames("df1").show()

      // Self-join: pair each area row with its child rows (parentid -> id).
      spark.sql(
        """
          |select *
          |from df1
          |join df2 on df1.id = df2.parentid
          |""".stripMargin).show()
    } finally {
      // Always release the SparkSession (the original leaked it).
      spark.stop()
    }
  }

}
