package com.sugon.ww

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
  * Demo of joining two Spark DataFrames, comparing the SQL (temp-view) approach
  * with the DSL (`DataFrame.join`) approach. The active example performs a
  * right-outer join of a (id, name, age) table against an (age, description)
  * lookup table.
  */
object JoinDemo {
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkSession (local mode with 2 threads for the demo).
    val sparkSession: SparkSession = SparkSession.builder().appName("JoinDemo")
      .master("local[2]").getOrCreate()

    // Needed for the $"col" column syntax and toDF on tuples.
    import sparkSession.implicits._

    // 2. Create the first Dataset directly from raw space-separated strings.
    val datas1: Dataset[String] = sparkSession.createDataset(List(
      "1 tony 18",
      "2 reba 22",
      "3 mimi 20"
    ))

    // 3. Parse each line into an (id, name, age) tuple.
    val dataDS1: Dataset[(Int, String, Int)] = datas1.map(x => {
      val fields: Array[String] = x.split(" ")
      val id = fields(0).toInt
      val name = fields(1) // already a String; no conversion needed
      val age = fields(2).toInt

      // Emit as a tuple so toDF can name the columns.
      (id, name, age)
    })

    val dataDF1: DataFrame = dataDS1.toDF("id", "name", "age")


    // 4. Create the second Dataset: an age -> description lookup.
    val datas2: Dataset[String] = sparkSession.createDataset(List(
      "18 young",
      "22 old"
    ))

    // 5. Parse each line into an (age, desc) tuple.
    val dataDS2: Dataset[(Int, String)] = datas2.map(x => {
      val fields: Array[String] = x.split(" ")
      val age = fields(0).toInt
      val desc: String = fields(1) // already a String; no conversion needed

      // Emit as a tuple so toDF can name the columns.
      (age, desc)
    })

    // 6. Convert to a DataFrame; "dage" avoids an ambiguous "age" column in the join.
    val dataDF2: DataFrame = dataDS2.toDF("dage", "desc")

    /**
      * SQL approach (kept commented out as an alternative to the DSL below).
      **/
    // 7. Register temp views so the tables can be referenced from SQL.
//    dataDF1.createTempView("d1_t")
//    dataDF2.createTempView("d2_t")

    // 8. Express the join in SQL.
//    val r: DataFrame = sparkSession.sql("select name,desc from d1_t join d2_t on d1_t.age = d2_t.age ")
//
//    // 9. Trigger the job (show is an action).
//    r.show()

    /**
      * DSL approach. Alternative join types left commented for experimentation.
      **/
    //val r: DataFrame = dataDF1.join(dataDF2,$"age" === $"dage","left")
    //val r: DataFrame = dataDF1.join(dataDF2,$"age" === $"dage","left_outer")
//    val r: DataFrame = dataDF1.join(dataDF2,$"age" === $"dage","right")
    val r: DataFrame = dataDF1.join(dataDF2, $"age" === $"dage", "right_outer")
    r.show()

    // Release Spark resources; without this the local session (and its threads)
    // would be left running until JVM exit.
    sparkSession.stop()
  }
}


