package org.example

import org.apache.spark.sql.SparkSession

object sql {
  /**
   * Spark SQL demo: reads a score file as RDD and as CSV DataFrame,
   * then runs aggregate queries against temp views.
   *
   * Note: `spark.read.csv` without `inferSchema` loads every column as
   * StringType, so numeric columns must be CAST explicitly — otherwise
   * MAX compares lexicographically (e.g. "9" > "100") and returns the
   * wrong maximum.
   */
  def main(args: Array[String]): Unit = {
    // Create the Spark session (local mode, all available cores).
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Read the score file as a plain RDD of lines and print it.
    // (foreach runs on executors; output order is not guaranteed.)
    val scoreRDD = sc.textFile("src/main/resources/score.txt")
    scoreRDD.foreach(println)

    // Read as CSV: fields land in generated columns _c0, _c1, _c2, _c3.
    // (spark.read.textFile would yield a single `value` column instead.)
    val scoreFrame = spark.read.csv("src/main/resources/score.txt")

    // Show the schema (all columns are strings — inferSchema is off).
    scoreFrame.printSchema()

    // Preview the data.
    scoreFrame.show()

    // Register a temp view so the data can be queried with SQL.
    scoreFrame.createTempView("score")

    // Per-course max, total and average score.
    // CAST is required: _c3 is a string column, and string MAX is
    // lexicographic, which would report e.g. "9" as greater than "100".
    val res1 = spark.sql(
      """
        |select
        |_c2 as course,
        |max(cast(_c3 as int)) as max_score,
        |sum(cast(_c3 as int)) as sum_score,
        |avg(cast(_c3 as int)) as avg_score
        |from score
        |group by _c2
        |""".stripMargin)
    res1.show()

    // Names of students scoring above 90; explicit cast instead of
    // relying on Spark's implicit string-to-int coercion.
    val res2 = spark.sql(
      """
        |select
        |_c1 as name
        |from score
        |where cast(_c3 as int) > 90
        |""".stripMargin)
    res2.show()

    // Exercise: average/max of the class's regular grades and how many
    // students scored below 60. The CSV is GBK-encoded with a header row
    // of Chinese column names.
    val scoreYun2 = spark.read
      .option("encoding", "GBK")
      .option("header", "true")
      .csv("src/main/resources/23yun2.csv")
    scoreYun2.show()

    // Register a temp view for the exercise data.
    scoreYun2.createTempView("scoreYun2")

    // `平时成绩` = "regular grade". Cast to INT for the same reason as
    // above: header=true still leaves every column as StringType.
    val res3 = spark.sql(
      """
        |SELECT
        |AVG(CAST(`平时成绩` AS INT)) AS avg_grade,
        |MAX(CAST(`平时成绩` AS INT)) AS max_grade,
        |SUM(CASE WHEN CAST(`平时成绩` AS INT) < 60 THEN 1 ELSE 0 END) AS fail_count
        |FROM scoreYun2
        |""".stripMargin)
    res3.show()

    // Names (`学生姓名`) of students whose regular grade is below 60.
    val res4 = spark.sql(
      """
        |SELECT
        |`学生姓名`
        |FROM scoreYun2
        |WHERE CAST(`平时成绩` AS INT) < 60
        |""".stripMargin)
    res4.show()

    // spark.stop() shuts down both the session and its SparkContext.
    spark.stop()
  }
}