package io.sqrtqiezi.spark.dataframe

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.broadcast

/**
 * Demonstrates the DataFrame join API: inner, outer, left/right outer,
 * left semi, left anti, and broadcast-hash joins, each printed to stdout.
 */
object JoinOperator {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local")
      .appName("join sample")
      .getOrCreate()

    import spark.implicits._

    // Sample people; `graduate_program` is a foreign key into graduateProgram.id,
    // `spark_status` holds ids from sparkStatus.
    val person = Seq(
      (0, "Bill Chambers", 0, Seq(100)),
      (1, "Matt Zaharia", 1, Seq(500, 250, 100)),
      (2, "Michael Armbrust", 1, Seq(250, 100))
    ).toDF("id", "name", "graduate_program", "spark_status")

    val graduateProgram = Seq(
      (0, "Masters", "School of Information", "UC Berkeley"),
      (1, "Masters", "EECS", "UC Berkeley"),
      (2, "Ph.D", "EECS", "UC Berkeley")
    ).toDF("id", "degree", "department", "school")

    val sparkStatus = Seq(
      (500, "Vice President"),
      (250, "PMC Member"),
      (100, "Contributor")
    ).toDF("id", "status")

    // show() and explain() are side-effecting, so keep the parentheses.
    person.show()
    graduateProgram.show()
    sparkStatus.show()

    val joinExpression = person.col("graduate_program") === graduateProgram.col("id")

    // explain() prints the physical plan itself and returns Unit,
    // so it must not be wrapped in println (that would only print "()").
    val innerJoin = person.join(graduateProgram, joinExpression)
    innerJoin.explain()
    innerJoin.show()

    // Join-type variants: rows kept depend on which side may be unmatched.
    person.join(graduateProgram, joinExpression, "outer").show()

    person.join(graduateProgram, joinExpression, "left_outer").show()

    person.join(graduateProgram, joinExpression, "right_outer").show()

    person.join(graduateProgram, joinExpression, "left_semi").show()

    person.join(graduateProgram, joinExpression, "left_anti").show()

    // Broadcast the small table so the planner picks a broadcast-hash join.
    person.join(broadcast(graduateProgram), joinExpression).explain()

    spark.stop()
  }
}
