package org.example

import org.apache.spark.sql.{SparkSession, types}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

/**
 * Small Spark SQL demo: loads a CSV score file and a JSON student file,
 * then computes the average age of students from a specific school.
 */
object SQL2 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // Explicit schema for the score CSV (id,name,course,score).
    val schemaScore = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true),
      StructField("course", StringType, nullable = true),
      StructField("score", IntegerType, nullable = true)
    ))
    // NOTE(review): these two DataFrames are built but never used below;
    // kept for parity with the original demo code.
    val scoreFrame = spark.read.schema(schemaScore).csv("src/main/resources/score.txt")
    val scoreNewName = scoreFrame.toDF("id", "name", "course", "score")
    //scoreFrame.printSchema()
    //scoreFrame.show()
    //scoreNewName.show()

    // Schema for the JSON student records. FIX: the query below accesses
    // `school.schoolName`, so `school` must be a nested struct, not a plain
    // string — declaring it StringType makes the query fail with an
    // AnalysisException ("Can't extract value from school").
    val schema = StructType(Array(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true),
      StructField("school", StructType(Seq(
        StructField("schoolName", StringType, nullable = true)
      )), nullable = true),
      StructField("time", StringType, nullable = true),
      StructField("age", IntegerType, nullable = true),
      StructField("score", IntegerType, nullable = true)
    ))

    val data = spark.read.schema(schema).json("src/main/resources/json.txt")
    data.createOrReplaceTempView("students")

    // Average age of students whose nested school name matches.
    val result = spark.sql(
      """
        |select
        |avg(age) as avg_age
        |from students
        |where school.schoolName = '清职院'
        |""".stripMargin)
    result.show()

    // spark.stop() also stops the underlying SparkContext; no need to
    // grab sparkContext just to call stop() on it.
    spark.stop()
  }
}