package org.example

import org.apache.spark.sql.{SparkSession, types}
import org.apache.spark.sql.types.{DataTypes, IntegerType, StringType, StructField, StructType}

object SQL {
  /** Entry point: reads a CSV score file and a JSON student file using
    * explicit schemas, registers the JSON data as a temp view, and runs a
    * Spark SQL query computing the average age of students from one school.
    */
  def main(args: Array[String]): Unit = {
    // Create the Spark session (local mode, all available cores).
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // Schema (column names and types) for the CSV score data.
    val schemaScore = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true),
      StructField("course", StringType, nullable = true),
      StructField("score", IntegerType, nullable = true)
    ))

    val scoreFrame = spark.read.schema(schemaScore).csv("src/main/resources/score.txt")

    // Rename the columns.
    val scoreNewName = scoreFrame.toDF("stu_id", "stu_name", "stu_course", "stu_score")

    // Print the schema (column names and types) for debugging.
    // scoreFrame.printSchema()
    // // Inspect the data.
    // scoreFrame.show()
    // scoreNewName.show()

    // Schema for the JSON data, including nested struct and array fields.
    val schema = StructType(Array(
      StructField("id", IntegerType, nullable = false), // student id (non-null)
      StructField("name", StringType, nullable = true), // name
      // FIX: `age` was StringType with a stale "course" comment; the query
      // below computes AVG(age), so the field must be numeric.
      StructField("age", IntegerType, nullable = true), // age
      StructField("school", DataTypes.createStructType(Array(
        StructField("schoolName", StringType),
        StructField("time", StringType)
      ))),
      StructField("score", DataTypes.createArrayType(IntegerType), nullable = true) // scores
    ))

    val data = spark.read.schema(schema).json("src/main/resources/json.txt")
    data.printSchema()
    data.show()

    // Register as a temporary view so it can be queried with SQL.
    data.createOrReplaceTempView("students")

    // Average age of students whose school is '清职院'.
    val avgAgeQuery = spark.sql(
      """
        |SELECT AVG(age) as average_age
        |FROM students
        |WHERE school.schoolName = '清职院'
          """.stripMargin)

    // Display the result.
    avgAgeQuery.show()

    // FIX: stop the session itself (this also stops the underlying
    // SparkContext, which is all the original `sc.stop()` did).
    spark.stop()
  }
}