package org.example

import org.apache.spark.sql.SparkSession

object sparksql09 {
  /**
   * Demo job: reads score data as both an RDD and an untyped CSV DataFrame,
   * runs per-term aggregate statistics and a failing-score filter via Spark
   * SQL, then loads a second (GBK-encoded, headered) CSV and reports the
   * number of students scoring below 60.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // Raw RDD read, printed line-by-line for inspection.
    val sc = spark.sparkContext
    val scoreRDD = sc.textFile("src/main/resources/score.txt")
    scoreRDD.foreach(println)

    // Same file as an untyped CSV DataFrame; columns default to _c0, _c1, ...
    val scoreFrame = spark.read.csv("src/main/resources/score.txt")
    scoreFrame.printSchema()
    scoreFrame.show()
    scoreFrame.createTempView("score")

    // Per-term aggregate statistics.
    // FIX: original selected avg(_c3) twice; third aggregate is now min.
    val res1 = spark.sql(
      """
        |select
        | _c2 as term,
        | avg(_c3) as avg_score,
        | max(_c3) as max_score,
        | min(_c3) as min_score
        | from score
        | group by _c2
        |""".stripMargin)
    res1.show()

    // Names of students with a failing score (< 60).
    val res2 = spark.sql(
      """
        |select
        | _c1 as name
        | from score
        | where _c3 < 60
        |""".stripMargin)
    res2.show()

    // Second data set: GBK-encoded CSV with a header row.
    val sparkScore = spark.read.option("encoding", "GBK").option("header", "true").csv("src/main/resources/23yun2.csv")
    // Show the schema and contents.
    sparkScore.printSchema()
    sparkScore.show()

    // FIX: original counted every row of sparkScore but reported it as the
    // number of failing students; count the filtered (< 60) query instead.
    val countLowScore = res2.count()
    println(s"Number of students with score less than 60: $countLowScore")

    // Stopping the session also stops the underlying SparkContext.
    spark.stop()
  }

}
