package org.example
import org.apache.spark.sql.SparkSession
import java.util.regex.Pattern
object classcode0417 {
  /**
   * Entry point: reads a score file with both the RDD and DataFrame/SQL APIs,
   * runs a per-course aggregation and a high-score filter over it, then loads
   * a separate header-bearing CSV.
   *
   * Column meaning inferred from the queries below (positional CSV names):
   * _c1 = student name, _c2 = course, _c3 = score — TODO confirm against the
   * actual data file.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder() // fixed typo: was ".builer()"
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Read as a plain RDD of lines. NOTE: standard sbt/Maven layout is
    // "src/main/resources" (plural); the original "resource" path would fail
    // at runtime unless the project really uses that directory.
    val scoreRDD = sc.textFile("src/main/resources/score.txt")
    scoreRDD.foreach(println)

    // Read via Spark SQL: once as Dataset[String], once as an untyped DataFrame.
    // (Renamed from the inconsistent scoresQL/scoreSQL pair, which did not compile.)
    val scoreSQL = spark.read.textFile("src/main/resources/score.txt")
    val scoreFrame = spark.read.csv("src/main/resources/score.txt")

    // Print the schema (column names and types), then the rows.
    scoreSQL.printSchema() // fixed typo: was "printSchena()"
    scoreSQL.show()

    scoreFrame.createTempView("score")

    // Per-course aggregates. Fixed: missing comma after "course", trailing
    // comma before FROM (both invalid SQL), and sum/avg columns that were all
    // copy-pasted as max(); "sun_score" corrected to "sum_score".
    val res1 = spark.sql(
      """
        |select
        | _c2 as course,
        | max(_c3) as max_score,
        | sum(_c3) as sum_score,
        | avg(_c3) as avg_score
        |from score
        | group by _c2
        |""".stripMargin
    )
    res1.show()

    // Names of students scoring above 90. Fixed typo: was "spar.sql".
    val res2 = spark.sql(
      """
        | select
        | _c1 as name
        | from score
        | where _c3 > 90
        | """.stripMargin
    )
    res2.show()

    // CSV with a header row. "ending" is not a recognized Spark CSV option;
    // presumably "inferSchema" was intended — TODO confirm intent.
    val scoreYun2 = spark.read
      .option("inferSchema", "true")
      .option("header", "true")
      .csv("src/main/resources/23yun1.csv")
    scoreYun2.show()

    // Stop the whole session (this also stops the underlying SparkContext).
    spark.stop()
  }

}
