package com.shujia.sql

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Demonstrates a map-side (broadcast) join with the DataFrame API:
  * the small `student` table is broadcast to every executor so the join
  * with `score` avoids a shuffle.
  */
object Demo9MapJoin {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo9MapJoin")
      .master("local")
      // Small demo data: one shuffle partition is enough and keeps output readable.
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    // Needed for the $"col" column syntax below.
    import spark.implicits._

    // Small dimension table — the broadcast candidate.
    val student: DataFrame = spark
      .read
      .format("json")
      .load("Spark/data/stu/students.json")

    // Larger fact table, read as CSV with an explicit schema.
    val score: DataFrame = spark
      .read
      .format("csv")
      .schema("student_id STRING, cou_id STRING, sco INT")
      .load("Spark/data/stu/score.txt")

    /**
      * Map join with DataFrames: hint("broadcast") on the small side tells
      * the optimizer to ship `student` to every task, turning the join into
      * a map-side hash join (no shuffle of `score`).
      */
    val joinDF: DataFrame = score.join(student.hint("broadcast"), $"student_id" === $"id")

    joinDF.show()

    // Keep the driver alive so the Spark UI (http://localhost:4040) can be
    // inspected. Sleep instead of busy-waiting so we don't peg a CPU core.
    while (true) {
      Thread.sleep(1000)
    }
  }

}
