package com.shujia.spark.optimize

import org.apache.spark.sql.{DataFrame, SparkSession}
/**
  * Demo: forcing a map-side (broadcast hash) join in Spark SQL, both via the
  * DataFrame API (`.hint("broadcast")`) and via a SQL hint comment.
  *
  * Broadcasting the small table ships it to every executor so the join is
  * performed locally on each partition of the large table, avoiding a shuffle.
  */
object Demo03MapJoin {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local[4]")
      .appName("join")
      // Keep shuffle parallelism small for a local demo.
      .config("spark.sql.shuffle.partitions", 2)
      .getOrCreate()

    // Implicit conversions — required for the $"column" interpolator below.
    import spark.implicits._

    val student: DataFrame = spark.read
      .schema("id STRING , name STRING ,age INT ,gender STRING ,clazz STRING")
      .csv("data/students.txt")

    val score: DataFrame = spark.read
      .schema("sid STRING ,cId STRING , sco INT")
      .csv("data/score.txt")

    /**
      * .hint("broadcast"): broadcast the score table to implement a map join.
      * NOTE: the plan below is lazy — with .show() commented out it is never
      * actually executed; uncomment .show to materialize it.
      */
    student.join(score.hint("broadcast"), $"sId" === $"id")
//      .show(1000)

    // Same map join expressed in SQL, using the /*+ broadcast(...) */ hint.

    student.createOrReplaceTempView("student")
    score.createOrReplaceTempView("score")

    spark.sql(
      """
        |select /*+broadcast(a) */* from score as a
        |join student as b
        |on a.sId = b.id
      """.stripMargin)
      .show(1000)

    // Release the session's resources once the demo has finished.
    spark.stop()
  }
}
