package org.example

import org.apache.spark.sql.SparkSession

object sql9 {
  /**
   * Reads a GBK-encoded CSV of student scores, then reports per-term
   * average/maximum scores and the students scoring below 60.
   */
  def main(args: Array[String]): Unit = {
    // Local Spark session for this standalone analysis job.
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // Read the CSV file.
    // NOTE(review): with header=true the columns take their names from the
    // header row, yet the SQL below addresses them as _c1/_c2/_c3 — names
    // Spark only generates when header=false. Confirm the header row
    // literally contains those names, otherwise drop the header option.
    val sparkScore = spark.read
      .option("encoding", "GBK")
      .option("header", "true")
      .csv("src/main/resources/23yun2.csv")

    // Inspect the inferred schema and a sample of the data.
    sparkScore.printSchema()
    sparkScore.show()

    // createOrReplaceTempView is idempotent; createTempView would throw
    // AnalysisException if the view already exists (e.g. on a re-run
    // within the same session).
    sparkScore.createOrReplaceTempView("score")

    // Average and maximum score per term (_c2 = term, _c3 = score).
    // CSV values are read as strings; without the cast, max() compares
    // lexicographically (e.g. "9" > "100"), producing a wrong maximum.
    val res1 = spark.sql(
      """
        |select
        | _c2 as term,
        | avg(cast(_c3 as double)) as avg_score,
        | max(cast(_c3 as double)) as max_score
        | from score
        | group by _c2
      """.stripMargin)
    res1.show()

    // Names of students scoring below 60; the explicit cast keeps the
    // comparison numeric rather than relying on implicit coercion.
    val res2 = spark.sql(
      """
        |select
        | _c1 as name
        | from score
        | where cast(_c3 as double) < 60
      """.stripMargin)
    res2.show()

    // Count of failing students (res2 is already displayed above).
    val countRes2 = res2.count()
    println(s"Number of students with score less than 60: $countRes2")

    spark.stop()
  }
}