package org.example
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

object classcode0418 {

  /** Demo entry point: starts a local SparkSession, reads a CSV file and a
    * JSON file with explicit schemas, renames the CSV columns, and prints
    * the results.
    *
    * Fixes over the previous revision:
    *   - `.builer()` -> `.builder()` (typo, did not compile)
    *   - `ture` -> `true`, `StuctField` -> `StructField` (typos)
    *   - `DataTypes.createStructType(IntegerType)` was invalid:
    *     `createStructType` takes fields, not a plain DataType
    *   - `schemaScore` had 5 fields (including nested structs the CSV
    *     reader cannot produce) while `toDF` renamed 4 columns, which
    *     would fail at runtime; the schema is now flat with 4 columns
    *   - the DataFrame returned by `toDF` was discarded; it is now kept
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Flat schema for the CSV input; must have exactly as many fields as
    // the toDF(...) rename below supplies names for.
    val schemaScore = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true),
      StructField("course", StringType, nullable = true),
      StructField("score", IntegerType, nullable = true)
    ))
    val scoreFrame = spark.read.schema(schemaScore).csv("src/main/resources/score.txt")
    // toDF returns a NEW DataFrame with renamed columns; keep the result.
    val scoreNewName = scoreFrame.toDF("stu_id", "stu_name", "stu_course", "stu_score")

    // Explicit schema for the JSON input (same shape as the CSV schema,
    // kept separate so each source can evolve independently).
    val schema = StructType(Array(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true),
      StructField("course", StringType, nullable = true),
      StructField("score", IntegerType, nullable = true)
    ))

    val data = spark.read.schema(schema).json("src/main/resources/json.txt")
    data.printSchema()
    data.show()
    scoreNewName.show()

    sc.stop()
  }

}
