package org.example

import org.apache.spark.sql.SparkSession

object SQL {
  /**
   * Demonstrates reading score data three ways (RDD, Dataset[String], DataFrame
   * via CSV), then runs aggregate SQL queries over temporary views.
   *
   * Expects `src/main/resources/score.txt` (headerless CSV) and
   * `src/main/resources/23yun2.csv` (GBK-encoded, with header) to exist
   * relative to the working directory.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // RDD read: one raw string per line.
    // NOTE: foreach(println) runs on executors; output is only visible here
    // because the master is local[*]. On a cluster, collect() first.
    val scoreRDD = sc.textFile("src/main/resources/score.txt")
    scoreRDD.foreach(println)

    // SQL-API reads: Dataset[String] (one row per line) and a DataFrame
    // parsed as CSV — headerless, so columns are auto-named _c0, _c1, ...
    val scoreSQL = spark.read.textFile("src/main/resources/score.txt")
    val scoreFrame = spark.read.csv("src/main/resources/score.txt")

    // Print the schema (column names and types).
    scoreSQL.printSchema()
    // Preview the parsed data.
    scoreFrame.show()

    // Register a temporary view so the DataFrame can be queried with SQL.
    scoreFrame.createTempView("score")

    // Per-course max / sum / average score (assumes _c2 = course, _c3 = score).
    val res1 = spark.sql(
      """
        |select
        |  _c2 as course,
        |  max(_c3) as max_score,
        |  sum(_c3) as sum_score,
        |  avg(_c3) as avg_score
        |  from score
        |  group by _c2
        |""".stripMargin)
    res1.show()

    // Names of students scoring above 90 (assumes _c1 = name).
    val res2 = spark.sql(
      """
        |select
        | _c1 as name
        | from score
        | where _c3 > 90
        |""".stripMargin)
    res2.show()

    // In-class exercise: highest score, average score, and number of failing
    // students for the class's regular grades. The CSV is GBK-encoded and has
    // a header row, so column names (e.g. cj) come from the file.
    val scoreYun2 = spark.read.option("encoding", "GBK").option("header", "true").csv("src/main/resources/23yun2.csv")
    scoreYun2.show()
    scoreYun2.createTempView("score_yun2")

    // Highest and average score over the whole class.
    val res3 = spark.sql(
      """
        |select
        | max(cj) as max_score,
        | avg(cj) as avg_score
        | from score_yun2
        |""".stripMargin)
    res3.show()

    // Count of students below the passing mark (60).
    val res4 = spark.sql(
      """
        |select count(*) as bjgrs
        |from score_yun2
        |where cj < 60
        |""".stripMargin)
    res4.show()

    // Stop the full SparkSession (this also stops the underlying SparkContext);
    // preferred over sc.stop(), which leaves session-level state behind.
    spark.stop()
  }
}