package org.example

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

/**
 * Small demo app: reads score files as RDD / Dataset / DataFrame, then runs an
 * aggregate query over a GBK-encoded CSV of student scores (23yun2.csv).
 */
object txt {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Raw text as an RDD — printed line by line (local mode, so println is visible).
    val scoreRDD = sc.textFile("src/main/resources/score.txt")
    scoreRDD.foreach(println)

    // Same file as a Dataset[String] and as an untyped DataFrame (csv parsing).
    val scoreSQL = spark.read.textFile("src/main/resources/score.txt")
    val scoreFrame = spark.read.csv("src/main/resources/score.txt")

    scoreSQL.printSchema()
    scoreFrame.show()
    scoreFrame.createTempView("score")

    // The CSV is GBK-encoded and carries a header row with Chinese column names.
    val scoreYun2 =
      spark.read.option("encoding", "GBK").option("header", "true").csv("src/main/resources/23yun2.csv")
    scoreYun2.printSchema()
    scoreYun2.show()
    scoreYun2.createTempView("scores")

    // NOTE: column identifiers must be backtick-quoted. In Spark SQL, double
    // quotes delimit STRING LITERALS, so the original AVG("平时成绩") averaged a
    // constant string (NULL result) and the CASE comparison never matched.
    val results = spark.sql(
      """
        |SELECT
        | AVG(`平时成绩`) AS average_score,
        | MAX(`平时成绩`) AS max_score,
        | COUNT(CASE WHEN `平时成绩` < 60 THEN `学生姓名` END) AS failing_students_count,
        | COLLECT_LIST(CASE WHEN `平时成绩` < 60 THEN `学生姓名` END) AS failing_students_names
        | FROM scores
        |""".stripMargin)

    // Spark is lazy: without an action the query above would never execute.
    results.show(false)

    // Stop the whole session (also stops the underlying SparkContext).
    spark.stop()
  }
}
