package org.example
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{DataTypes, IntegerType, StringType, StructField, StructType}

object sparkData2_SQL2 {

  /** Reads student records from a JSON file using an explicit schema,
    * registers them as a temp view, and computes the average age per school
    * with Spark SQL. Entry point; expects no command-line arguments.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder
      .appName("Spark SQL Example")
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "E:\\yhm\\新建文件夹\\sparkData/spark-warehouse")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // 1. Define the table structure for the JSON input: each student has an
      //    id, a name, a nested `school` struct, an age, and an array of scores.
      val schemaJson = StructType(Seq(
        StructField("id", IntegerType, nullable = false),
        StructField("name", StringType),
        StructField("school", DataTypes.createStructType(
          Array(
            StructField("schoolName", StringType),
            StructField("time", StringType)))),
        StructField("age", IntegerType),
        StructField("score", DataTypes.createArrayType(IntegerType))
      ))

      // 2. Read the JSON file with the explicit schema (skips schema inference).
      val scoreJson = spark.read.schema(schemaJson)
        .json("src/main/resources/json.txt")
      scoreJson.printSchema()
      scoreJson.show()

      // 3. Register a temp view and aggregate. createOrReplaceTempView is
      //    idempotent, unlike createTempView which throws if the view exists.
      scoreJson.createOrReplaceTempView("student")

      // Fixed: the original query had `avg (age)= as avg_age` — the stray `=`
      // is invalid SQL and caused a ParseException at runtime.
      val result = spark.sql(
        """
          |select
          |  school.schoolName as school_name,
          |  avg(age) as avg_age
          |from student
          |group by school.schoolName
          |""".stripMargin)
      result.show()
    } finally {
      // spark.stop() also stops the underlying SparkContext, so the separate
      // `sc.stop()` (and the unused `sc` val) are unnecessary.
      spark.stop()
    }
  }
}
