package org.example

import org.apache.spark.sql.{SparkSession, types}
import org.apache.spark.sql.types.{DataType, DataTypes, IntegerType, StringType, StructField, StructType}

/**
 * Spark SQL exercise: read student records from a JSON file with an explicit
 * schema (including a nested `school` struct and an integer `score` array),
 * register them as a temporary view, and compute the average age per school.
 *
 * Expects the input file at `src/main/resources/json.txt` (JSON Lines).
 */
object sparkData2_SQL3 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("sparkData2_SQL3") // explicit name so the job is identifiable in the Spark UI/logs
      .master("local[*]")
      .getOrCreate()

    // Read JSON data — step 1: define the table schema explicitly
    // (avoids a schema-inference pass and pins the column types).
    val schemaJson = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType),
      StructField("school", DataTypes.createStructType(
        Array(StructField("schoolName", StringType),
          StructField("time", StringType)))),
      StructField("age", IntegerType),
      StructField("score", DataTypes.createArrayType(IntegerType))
    ))

    val studentJson = spark.read.schema(schemaJson)
      .json("src/main/resources/json.txt")
    studentJson.printSchema()
    studentJson.show()

    // createOrReplaceTempView is idempotent: a plain createTempView throws
    // if the view already exists (e.g. when re-running in a shared session).
    studentJson.createOrReplaceTempView("student")

    // Average age grouped by the nested school name.
    val result = spark.sql(
      """
        |select
        |  school.schoolName as school_name,
        |  avg(age) as avg_age
        |  from student
        |  group by school.schoolName
        |""".stripMargin)

    result.show()

    // TODO: compute the average score of students graduated from Qingyuan Polytechnic
    // TODO: read the movie-user dataset with an explicit schema

    // spark.stop() also stops the underlying SparkContext.
    spark.stop()
  }

}
