package spark.sql

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * @Author Jeremy Zheng
 * @Date 2021/3/29 12:01
 * @Version 1.0
 */
/**
 * Demo: joining two DataFrames in Spark SQL, both SQL-style (temp views)
 * and DSL-style (`Dataset.join`), when the join-key column names differ.
 *
 * Inputs (relative paths, resolved against the working directory):
 *   - dataSet/name.txt   : text lines of the form "<uid> <age>" (space-separated ints)
 *   - dataSet/test2.json : JSON records expected to contain at least `id` and `name`
 *     — TODO confirm schema against the actual file.
 */
object SparkSQL05_join_Demo1 {
  def main(args: Array[String]): Unit = {

    // Local SparkSession for the demo; "local[*]" uses all available cores.
    val spark: SparkSession = SparkSession.builder().appName("select_Demo1")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    val ds1: Dataset[String] = spark.read.textFile("dataSet/name.txt")

    // Parse each "<uid> <age>" line into an (Int, Int) pair.
    // NOTE(review): malformed lines will throw NumberFormatException — acceptable for a demo.
    val ds2: Dataset[(Int, Int)] = ds1.map { line =>
      val fields: Array[String] = line.split(" ")
      (fields(0).toInt, fields(1).toInt)
    }

    val df1: DataFrame = ds2.toDF("uid", "age")
    //df1.show()

    val df2: DataFrame = spark.read.json("dataSet/test2.json")
    //df2.show()

    // Register both DataFrames as temporary views for SQL-style queries.
    df1.createOrReplaceTempView("t_name")
    df2.createOrReplaceTempView("t_json")

    // SQL-style join (kept for reference):
    // spark.sql("select * from t_name t1 left join t_json t2 on t1.id = t2.id").show()
    // spark.sql("select t1.id,name,age from t_name t1 left join t_json t2 on t1.id = t2.id").show()

    // DSL-style join when the key column names match (kept for reference):
    //val joindf1: DataFrame = df1.join(df2, "id")
    //joindf1.show()

    // DSL-style join when the key column names differ: supply an explicit condition.
    // Use the $"col" interpolator instead of the deprecated Symbol syntax ('uid === 'id) —
    // Symbol literals are deprecated since Scala 2.13.
    val joindf: DataFrame = df1.join(df2, $"uid" === $"id")
    joindf.createOrReplaceTempView("t_join")
    spark.sql("select id,name,age from t_join").show()

    // Release resources (close() stops the underlying SparkContext).
    spark.close()
  }
}
