package com.txl.cn.spark06

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by txl on 2018/1/3.
  */
/**
  * Created by txl on 2018/1/3.
  *
  * Demo: builds two small Datasets (people and area codes), converts them to
  * DataFrames, and joins them with the DataFrame DSL on the area code.
  */
object DataSetDemo {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .master("local")
      .appName("DataSetDemo")
      .getOrCreate()
    // Stop the session even if the job throws, so the local Spark context
    // and its resources are always released.
    try {
      import session.implicits._

      // Each record is "<name> <age> <areaCode>", space-separated.
      val person: Dataset[String] =
        session.createDataset(List("zs 32 sx", "ls 25 sd", "ww 28 hn", "zl 30 bj"))
      // Each record is "<areaCode> <areaName>", space-separated.
      val area = session.createDataset(List("sx 山西", "sd 山东", "hn 河南", "bj 北京"))

      // Split person records into (name, age, add) columns.
      val personDf: DataFrame = person.map { t =>
        val fields = t.split(" ")
        (fields(0), fields(1), fields(2))
      }.toDF("name", "age", "add")

      // Split area records into (add1, address) columns; "add1" keeps the
      // join key unambiguous against personDf's "add" column.
      val areaDf = area.map { t =>
        val fields = t.split(" ")
        (fields(0), fields(1))
      }.toDF("add1", "address")

      // Equivalent SQL-view approach:
      //   personDf.createTempView("p")
      //   areaDf.createTempView("a")
      //   session.sql("select * from p join a on p.add = a.add1")

      // DSL approach: pass the join condition to join() directly instead of
      // an unconditioned join followed by where(), which would first build a
      // cartesian product.
      val res = personDf.join(areaDf, $"add" === $"add1")
      res.show()
    } finally {
      session.stop()
    }
  }

}
