package com.doit.day07

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.types.{DataTypes, StructType}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: Learn big data at Duoyi Education (多易教育)
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description: Demo of Spark SQL Catalyst plan analysis via explain("extended")
 */
object Demo07Plain {

  /**
   * Loads order and user CSV files, registers them as temporary views, and
   * runs a nested join query whose logical and physical plans are printed via
   * `explain("extended")` — a demo of how Catalyst analyzes/optimizes the plan
   * (e.g. predicate pushdown and filter merging).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val session = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .getOrCreate()

    try {
      // Order schema.
      // NOTE(review): uid is declared StringType but the SQL below compares it
      // numerically (uid > 1); Spark inserts an implicit cast — confirm intent.
      val structType = new StructType()
        .add("oid", DataTypes.StringType)
        .add("price", DataTypes.DoubleType)
        .add("city", DataTypes.StringType)
        .add("category", DataTypes.StringType)
        .add("uid", DataTypes.StringType)
      // Load orders
      val orderDF = session.read.schema(structType).csv("data/orders/order.csv")

      // User schema
      val structType2 = new StructType()
        .add("uid", DataTypes.StringType)
        .add("name", DataTypes.StringType)
        .add("age", DataTypes.IntegerType)
      // Load users
      val userDF: DataFrame = session.read.schema(structType2).csv("data/orders/user.csv")

      // createOrReplaceTempView avoids AnalysisException on re-registration
      // (createTempView fails if the view name already exists in the session).
      orderDF.createOrReplaceTempView("tb_order")
      userDF.createOrReplaceTempView("tb_user")

      // Deliberately over-nested query: the extended explain output shows the
      // optimizer collapsing the subqueries and pushing the uid filters down.
      session.sql(
        """
          |select
          |t2.* ,
          |t3.*
          |from
          |(
          |select
          |uid ,
          |city,
          |category
          |from
          |(
          |select
          |*
          |from
          |tb_order
          |) t
          |where uid  > 1
          |)  t2
          |join
          |(
          |select
          |*
          |from
          |tb_user
          |where uid >0 and uid >1
          |)t3
          |on  t2.uid = t3.uid
          |where  t3.uid  > 1
          |
          |""".stripMargin).explain("extended")
    } finally {
      // Always release the local Spark resources, even if the job fails.
      session.stop()
    }
  }
}
