package org.example

import org.apache.spark.sql.types.{DataTypes, StructField, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo of reading CSV and JSON files with Spark SQL, showing how an
 * explicit [[StructType]] schema changes the inferred column types.
 *
 * Steps:
 *   1. Read a CSV without a schema (all columns become strings).
 *   2. Re-read it with an explicit schema.
 *   3. Read a header-bearing CSV and rename its columns via `toDF`.
 *   4. Read nested JSON (struct + array fields) with an explicit schema.
 */
object data1_SQL2 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // No schema given: every column is inferred as nullable StringType
    // with generated names (_c0, _c1, ...).
    val dataSet1: DataFrame = spark.read.csv("src/main/resources/data.csv")
    // Print the data schema.
    dataSet1.printSchema()
    // Inspect the data.
    dataSet1.show()

    // Explicit schema: named columns with proper integer/string types.
    val schema1 = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("course", DataTypes.StringType),
      StructField("score", DataTypes.IntegerType)
    ))
    val dataSet2 = spark.read.schema(schema1).csv("src/main/resources/data.csv")
    dataSet2.printSchema()
    dataSet2.show()

    // Header row of the CSV supplies the column names here.
    val dataSet3 = spark.read.option("header", "true").csv("src/main/resources/23data01.csv")
    dataSet3.printSchema()
    dataSet3.show()
    // Rename columns (positional aliases).
    val dataSet4 = dataSet3.toDF("st_name", "st_id", "st_course", "st_score")
    dataSet4.show()

    // Read JSON data with a nested struct ("school") and an array ("score").
    val jsonSchema = StructType(Seq(
      StructField("name", DataTypes.StringType),
      StructField("id", DataTypes.IntegerType),
      StructField("school", DataTypes.createStructType(
        Array(StructField("schoolName", DataTypes.StringType), StructField("time", DataTypes.StringType)))),
      StructField("age", DataTypes.IntegerType),
      StructField("score", DataTypes.createArrayType(DataTypes.IntegerType))
    ))
    val dataJSON = spark.read.schema(jsonSchema).json("src/main/resources/json.txt")
    dataJSON.printSchema()
    dataJSON.show()

    // Stop the session (which also stops its SparkContext); preferred over
    // calling sc.stop() directly.
    spark.stop()
  }
}
