package com.doit.day07

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.types.{DataTypes, StructType}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * Demonstrates DataFrame joins: inner join with an explicit condition,
 * and an outer ("left") join via the three-argument `join` overload.
 *
 * Reads orders from data/orders/order.csv and users from data/orders/user.csv.
 */
object Demo02Join {
  def main(args: Array[String]): Unit = {

    val session = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .getOrCreate()

    // Ensure Spark resources are released even if a read/join fails
    // (the original never called session.stop()).
    try {
      // Schema for the order file.
      val orderSchema = new StructType()
        .add("oid", DataTypes.StringType)
        .add("price", DataTypes.DoubleType)
        .add("city", DataTypes.StringType)
        .add("category", DataTypes.StringType)
        .add("id", DataTypes.StringType)
      // Load orders.
      val orderDF: DataFrame = session.read.schema(orderSchema).csv("data/orders/order.csv")

      // Schema for the user file.
      val userSchema = new StructType()
        .add("uid", DataTypes.StringType)
        .add("name", DataTypes.StringType)
        .add("age", DataTypes.IntegerType)
      // Load users.
      val userDF: DataFrame = session.read.schema(userSchema).csv("data/orders/user.csv")

      // NOTE: orderDF.join(userDF) with no condition is a Cartesian product.
      // The original bound it to an unused val; removed as dead code.
      // orderDF.join(userDF, "uid")           // inner join on a shared column name

      // Inner join on order.id === user.uid.
      orderDF.join(userDF, orderDF("id") === userDF("uid")).show()

      /**
       * join(right, joinExprs, joinType):
       *   param 1 — the right-hand DataFrame
       *   param 2 — the equality/join condition
       *   param 3 — join type: "inner", "left", "right", "full", ...
       */
      orderDF.join(userDF, orderDF("id") === userDF("uid"), "left").show()
    } finally {
      session.stop()
    }
  }

}
