package com.study.spark.scala.dataframe

import org.apache.spark.sql.{Dataset, SparkSession}

/**
  * DataFrame Join
  *
  * @author stephen
  * @create 2019-03-17 12:14
  * @since 1.0.0
  */
object DataFrameJoinDemo {

  /**
    * Entry point: builds two small in-memory Datasets, shapes them into
    * DataFrames and performs a left-outer join, demonstrating both the SQL
    * approach (kept commented out for reference) and the DataFrame API.
    *
    * Fix vs. the original: the SparkSession is now stopped in a `finally`
    * block so its resources are always released, even if the job fails.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("DataFrameJoinDemo")
      .getOrCreate()
    try {
      import spark.implicits._

      // First dataset: raw user records in "id,name,nation" CSV form.
      val uDS: Dataset[String] = spark.createDataset(List("1,zhangsan,china", "2,lisi,usa", "3,wangwu,jp"))
      // Parse each line into a typed (id, name, nation) tuple.
      val userDS: Dataset[(Long, String, String)] = uDS.map { line =>
        val fields = line.split(",")
        (fields(0).toLong, fields(1), fields(2))
      }
      val userDF = userDS.toDF("id", "name", "nation")

      // Second dataset: nation code paired with its Chinese display name.
      val cDS = spark.createDataset(List("china,中国", "usa,美国", "jp,日本"))
      val nationDS = cDS.map { line =>
        val fields = line.split(",")
        (fields(0), fields(1))
      }
      val nationDF = nationDS.toDF("ename", "cname")

      // Approach 1: SQL (equivalent to the API call below).
      //userDF.createOrReplaceTempView("user")
      //nationDF.createOrReplaceTempView("nation")
      //val r = spark.sql("SELECT id, name, cname FROM user LEFT JOIN nation ON nation=ename")

      // Approach 2: DataFrame API — left-outer join on the nation code so
      // users without a matching nation are still kept (cname = null).
      val r = userDF.join(nationDF, $"nation" === $"ename", "left_outer")
        .select("id", "name", "cname")

      // show(truncate = true): cell values longer than 20 chars are truncated.
      r.show(true)
    } finally {
      // Always release executors, UI, and other session resources.
      spark.stop()
    }
  }
}
