package com.zyh.day05.operator

import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/** Small demo of Spark DataFrame joins and positional union. */
object JoinTest {
  def main(args: Array[String]): Unit = {
    // Local Spark session; "lt" is just the demo application name.
    val session: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("lt")
      .getOrCreate()

    import session.implicits._

    // Students: (student id, name, class id). Note class 1003 has no matching class row.
    val students = Seq((1, "张三", 1001), (2, "李四", 1001), (3, "王五", 1002), (4, "赵六", 1003))
      .toDF("id", "name", "cid")

    // Classes: (class id, class name). Note class 1004 has no enrolled students.
    val classes = Seq((1001, "Java班"), (1002, "UI班"), (1004, "Python班"))
      .toDF("id", "cname")

    // Inner join:
    //   students.join(classes, students("cid") === classes("id"))
    // Left outer join:
    //   students.join(classes, students("cid") === classes("id"), "left")
    // Full outer join:
    //   students.join(classes, students("cid") === classes("id"), "full")

    // Positional union — both sides must expose the same number of columns,
    // so the class table is padded with a constant third column first.
    val combined: Dataset[Row] = students.union(classes.withColumn("default", lit(1)))
    combined.show()

    session.close()
  }
}
