package dataframe

import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.functions.{column, when}
import org.apache.spark.sql.types.{ArrayType, LongType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.util.AccumulatorV2

/**
 * Demonstrates Spark SQL usage side by side with the DataFrame DSL:
 * querying files directly via SQL, registering temporary views, and
 * mixing SQL statements with DataFrame transformations.
 */
object DataFrame_SQLTest {
  def main(args: Array[String]): Unit = {
    // Local-mode Spark using all available cores.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("DataFrame_SQLTest")

    val spark: SparkSession = SparkSession
      .builder()
      .config(sparkConf)
      .getOrCreate()
    import spark.implicits._

    // A trivial SQL expression — no table needed.
    spark.sql("select current_date() as today,1+100 as value").show()

    // Besides loading a file through the read API, Spark SQL can query
    // a file in place using the `format.`path`` syntax.
    spark.sql("select * from parquet.`data/users.parquet`").show()
    spark.sql("select * from json.`data/people.json`").show()

    // CSV queried this way has no header option, so the header row comes
    // back as data; rename the columns and filter that row out manually.
    val rawMovies = spark.sql("select * from csv.`data/movies.csv`")
    rawMovies
      .toDF("movieId","title","genres")
      .filter(!$"movieId".startsWith("movieId")) // drop the header row
      .show(false)

    // Register the movies dataset as a temporary view so SQL can address it.
    val movies: DataFrame = spark.read.parquet("data/movies.parquet")
    movies.createOrReplaceTempView("movie")

    // Same query twice: first with the DataFrame DSL...
    movies
      .where($"actor_name".contains("Jolie") && $"produced_year" > 2009)
      .show()
    // ...then with SQL against the registered view.
    spark
      .sql("select * from movie where actor_name like '%Jolie%' and produced_year>2009")
      .show()

    // SQL and DataFrame transformations can be freely mixed:
    // actors appearing in more than 30 movies, aggregated in SQL,
    // filtered and ordered with the DSL.
    val perActor = spark
      .sql("select actor_name,count(movie_title) movie_count from movie group by actor_name")
    perActor
      .where($"movie_count" > 30)
      .orderBy($"movie_count".desc)
      .show()

    // The same result expressed entirely in SQL via a subquery.
    spark.sql(
      """
        |select actor_name,movie_count
        |from (
        |select actor_name,count(movie_title) as movie_count
        |from movie
        |group by actor_name) t1
        |where movie_count>30
        |order by movie_count desc
        |""".stripMargin).show()

    // Movies produced per year, deduplicated with a subquery first.
    val perYear = spark.sql(
      """
        |select produced_year,count(movie_title) as movie_title_count
        |from (
        |select distinct movie_title,produced_year
        |from movie) t1
        |group by produced_year
        |order by movie_title_count desc
        |""".stripMargin)
    perYear.show(5)

    spark.stop()
  }
}
