package com.hliushi.spark.sql

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.junit.Test

/**
 * Demonstrates Spark SQL join types — inner, cross, full/left/right outer,
 * left semi and left anti — via both the Dataset API and equivalent SQL.
 *
 * author: Hliushi
 * date: 2021/5/20 10:35
 */
class JoinProcessor {

  // Shared SparkSession for every test in this class (local mode, 6 threads).
  val spark: SparkSession = SparkSession.builder()
    .appName("agg_processor")
    .master("local[6]")
    .getOrCreate()

  import spark.implicits._

  // Sample people: (id, name, cityId). cityId 3 has no matching city row,
  // which makes the outer/semi/anti join demos below interesting.
  private val personDf: DataFrame =
    Seq((0, "Lucy", 0), (1, "Lily", 0), (2, "Tim", 2), (3, "Danial", 0), (4, "hliushi", 3))
      .toDF("id", "name", "cityId")
  personDf.createOrReplaceTempView("person")

  // Sample cities: (id, name). City 1 (Shanghai) has no inhabitants above.
  private val cityDf: DataFrame =
    Seq((0, "Beijing"), (1, "Shanghai"), (2, "Guangzhou"))
      .toDF("id", "name")
  cityDf.createOrReplaceTempView("cities")


  /**
   * Basic join through the Dataset API:
   *   def join(right: Dataset[_], joinExprs: Column): DataFrame
   *   def join(right: Dataset[_], joinExprs: Column, joinType: String): DataFrame
   */
  @Test
  def introJoin(): Unit = {
    // A Column can be resolved against a specific DataFrame in several
    // equivalent ways: df("col"), df.col("col"), df.apply("col").
    val joined = personDf
      .join(cityDf, personDf("cityId") === cityDf("id"))
      .select(
        personDf.col("id"),
        personDf("name"),
        cityDf("name").alias("city"))

    joined.createOrReplaceTempView("user_city")

    // Query the joined view with plain SQL; this single query reads data
    // that originally came from two tables.
    val query =
      """
        |select
        |   id, name, city
        |from
        |   user_city
        |where
        |   city = 'Beijing'
        |""".stripMargin

    spark.sql(query).show()

    //  +---+------+-------+
    //  | id|  name|   city|
    //  +---+------+-------+
    //  |  0|  Lucy|Beijing|
    //  |  1|  Lily|Beijing|
    //  |  3|Danial|Beijing|
    //  +---+------+-------+
  }


  /**
   * Cross join (Cartesian product): every left row paired with every right row.
   */
  @Test
  def crossJoin(): Unit = {

    // A predicate written after the cross join is pushed down by the
    // optimizer, so this effectively behaves like an inner join.
    personDf.crossJoin(cityDf)
      .filter(personDf("cityId") === cityDf("id"))
      .show()

    //  +---+------+------+---+---------+
    //  | id|  name|cityId| id|     name|
    //  +---+------+------+---+---------+
    //  |  0|  Lucy|     0|  0|  Beijing|
    //  |  1|  Lily|     0|  0|  Beijing|
    //  |  2|   Tim|     2|  2|Guangzhou|
    //  |  3|Danial|     0|  0|  Beijing|
    //  +---+------+------+---+---------+

    val query =
      """
        |select
        |   u.id, u.name, c.name as city
        |from
        |   person u
        |cross join cities c
        |where u.cityId = c.id
        |
        |""".stripMargin

    spark.sql(query).show()
    //  +---+------+---------+
    //  | id|  name|     city|
    //  +---+------+---------+
    //  |  0|  Lucy|  Beijing|
    //  |  1|  Lily|  Beijing|
    //  |  2|   Tim|Guangzhou|
    //  |  3|Danial|  Beijing|
    //  +---+------+---------+
  }


  /** Same cross-join-plus-predicate demo, in both the API and inline SQL. */
  @Test
  def crossJoin2(): Unit = {
    personDf.crossJoin(cityDf)
      .filter(personDf("cityId") === cityDf("id"))
      .show()

    spark.sql("select u.id, u.name, c.name from person u cross join cities c where u.cityId = c.id")
      .show()
  }


  /**
   * Inner join: only rows that match on both sides.
   * `join` without an explicit joinType defaults to `inner`. Valid joinType
   * values: `inner`, `cross`, `outer`, `full`, `full_outer`, `left`,
   * `left_outer`, `right`, `right_outer`, `left_semi`, `left_anti`.
   */
  @Test
  def innerJoin(): Unit = {
    personDf.join(cityDf, personDf("cityId") === cityDf("id"), "inner")
      .show()

    spark.sql("select p.id, p.name, c.name from person p inner join cities c on p.cityId = c.id")
      .show()
    //  +---+------+---------+
    //  | id|  name|     name|
    //  +---+------+---------+
    //  |  0|  Lucy|  Beijing|
    //  |  1|  Lily|  Beijing|
    //  |  2|   Tim|Guangzhou|
    //  |  3|Danial|  Beijing|
    //  +---+------+---------+
  }


  /**
   * Full outer join (joinType `full` or `full_outer`).
   */
  @Test
  def fullJoin(): Unit = {
    // An inner join keeps only matched rows; an outer join additionally
    // keeps unmatched rows from one or both sides, padded with nulls.
    personDf.join(cityDf, personDf("cityId") === cityDf("id"), "full")
      .show()
    //  +----+-------+------+----+---------+
    //  |  id|   name|cityId|  id|     name|
    //  +----+-------+------+----+---------+
    //  |null|   null|  null|   1| Shanghai|
    //  |   4|hliushi|     3|null|     null|
    //  |   2|    Tim|     2|   2|Guangzhou|
    //  |   0|   Lucy|     0|   0|  Beijing|
    //  |   1|   Lily|     0|   0|  Beijing|
    //  |   3| Danial|     0|   0|  Beijing|
    //  +----+-------+------+----+---------+


    // NOTE: when building SQL by concatenating string pieces, keep the
    // separating spaces; a triple-quoted string with stripMargin is safer.
    spark.sql("select p.id, p.name, c.name from person p full join cities c on p.cityId = c.id")
      .show()
  }


  /**
   * Left outer join (joinType `left` / `left_outer`) and
   * right outer join (joinType `right` / `right_outer`).
   */
  @Test
  def leftJoinAndRightJoin(): Unit = {
    personDf.join(cityDf, personDf("cityId") === cityDf("id"), "left_outer")
      .show()
    //  +---+-------+------+----+---------+
    //  | id|   name|cityId|  id|     name|
    //  +---+-------+------+----+---------+
    //  |  0|   Lucy|     0|   0|  Beijing|
    //  |  1|   Lily|     0|   0|  Beijing|
    //  |  2|    Tim|     2|   2|Guangzhou|
    //  |  3| Danial|     0|   0|  Beijing|
    //  |  4|hliushi|     3|null|     null|
    //  +---+-------+------+----+---------+

    spark.sql("select p.id, p.name, c.name from person p left join cities c on p.cityId = c.id")
      .show()

    personDf.join(cityDf, personDf("cityId") === cityDf("id"), "right")
      .show()
    //  +----+------+------+---+---------+
    //  |  id|  name|cityId| id|     name|
    //  +----+------+------+---+---------+
    //  |   3|Danial|     0|  0|  Beijing|
    //  |   1|  Lily|     0|  0|  Beijing|
    //  |   0|  Lucy|     0|  0|  Beijing|
    //  |null|  null|  null|  1| Shanghai|
    //  |   2|   Tim|     2|  2|Guangzhou|
    //  +----+------+------+---+---------+

    spark.sql("select p.id, p.name, c.name from person p right join cities c on p.cityId = c.id")
      .show()
  }

  /**
   * Left semi/anti joins — neither returns columns from the right side:
   * - `left_semi`: left rows that DO have a match on the right.
   * - `left_anti`: left rows that do NOT have a match on the right.
   *
   * SQL equivalents:
   *   -- left semi join
   *   SELECT * FROM emp LEFT SEMI JOIN dept ON emp.deptno = dept.deptno
   *   -- same as: SELECT * FROM emp WHERE deptno IN (SELECT deptno FROM dept)
   *
   *   -- left anti join
   *   SELECT * FROM emp LEFT ANTI JOIN dept ON emp.deptno = dept.deptno
   *   -- same as: SELECT * FROM emp WHERE deptno NOT IN (SELECT deptno FROM dept)
   */
  @Test
  def leftAntiAndLeftSemi(): Unit = {
    // left anti: the one person (hliushi, cityId 3) with no matching city.
    personDf.join(cityDf, personDf("cityId") === cityDf("id"), "left_anti")
      .show()
    //  +---+-------+------+
    //  | id|   name|cityId|
    //  +---+-------+------+
    //  |  4|hliushi|     3|
    //  +---+-------+------+

    spark.sql("select p.id, p.name from person p left anti join cities c on p.cityId = c.id")
      .show()

    // left semi: everyone whose cityId matches an existing city.
    personDf.join(cityDf, personDf("cityId") === cityDf("id"), "left_semi")
      .show()
    //  +---+------+------+
    //  | id|  name|cityId|
    //  +---+------+------+
    //  |  0|  Lucy|     0|
    //  |  1|  Lily|     0|
    //  |  2|   Tim|     2|
    //  |  3|Danial|     0|
    //  +---+------+------+

    spark.sql("select p.id, p.name from person p left semi join cities c on p.cityId = c.id")
      .show()
  }
}