package org.example

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{DataType, DataTypes, IntegerType, StringType, StructField, StructType}

/**
 * Demo job: reads a score CSV and a nested-JSON file with explicit schemas,
 * then runs a Spark SQL aggregation over the JSON data.
 */
object ketang5 {
  def main(args: Array[String]): Unit = {
    // Local session using all available cores.
    val spark = SparkSession.builder().master("local[*]").appName("spark").getOrCreate()

    /*val scoreFrame =spark.read.csv("src/main/resources/score.txt")
    scoreFrame.printSchema()
    scoreFrame.show()*/

    // Explicit schema for the score CSV: id, name, course, score.
    val schemaScore = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true),
      StructField("course", StringType, nullable = true),
      StructField("score", IntegerType, nullable = true)
    ))
    val scoreFrame = spark.read.schema(schemaScore).csv("src/main/resources/score.txt")
    // Rename all columns in one shot.
    val scoreNewName = scoreFrame.toDF("stu_id", "stu_name", "stu_course", "stu_score")
    /*scoreFrame.printSchema()
    scoreFrame.show()
    scoreNewName.show()*/

    // JSON data: explicit schema with a nested struct and an array column.
    val schema = StructType(Array(
      StructField("id", IntegerType, nullable = false),  // student id, non-null
      StructField("name", StringType, nullable = true),  // name
      // Age is declared as a string in the source data; the AVG(age) query
      // below relies on Spark SQL's implicit string-to-numeric cast.
      StructField("age", StringType, nullable = true),
      StructField("school", DataTypes.createStructType(Array(
        StructField("schoolName", StringType),
        StructField("time", StringType)
      ))),
      StructField("score", DataTypes.createArrayType(IntegerType), nullable = true) // scores
    ))
    val data = spark.read.schema(schema).json("src/main/resources/json.txt")
    data.printSchema()
    data.show()
    // createOrReplaceTempView is idempotent; plain createTempView throws
    // AnalysisException if a view named "json" already exists in this session.
    data.createOrReplaceTempView("json")
    val res3 = spark.sql(
      """
        |select
        | AVG(age) as avg_age
        | from json
        | WHERE school.schoolName='清职'
        |""".stripMargin
    )
    res3.show()
    // Stop the session; this also stops the underlying SparkContext.
    spark.stop()
  }

}
