package org.example

import com.sun.prism.PixelFormat.DataType
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession, types}

object data1_SQL2 {
  /**
   * Demonstrates several ways of loading the same resource file into a
   * DataFrame: bare CSV read, CSV with an explicit schema, CSV with a
   * header row, column renaming, and a JSON read with a nested schema.
   *
   * Side effects only: prints each schema and frame to stdout.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // 1) Plain CSV read: columns get default names (_c0, _c1, ...) and
    //    every field is inferred as StringType.
    val dataSet1: DataFrame = spark.read.csv("src/main/resources/daat.txt")
    dataSet1.printSchema()
    dataSet1.show()

    // 2) Explicit schema: supply column names and types up front when the
    //    file itself carries no structural information.
    val csvSchema = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("course", DataTypes.StringType),
      StructField("score", DataTypes.IntegerType)
    ))
    val dataSet2 = spark.read.schema(csvSchema).csv("src/main/resources/daat.txt")
    dataSet2.printSchema()
    dataSet2.show()

    // 3) Header option: take column names from the file's first row.
    val dataSet3 = spark.read.option("header", "true").csv("src/main/resources/daat.txt")
    dataSet3.printSchema()
    dataSet3.show()

    // 4) Rename all columns positionally via toDF (aliasing).
    val dataSet4 = dataSet3.toDF("st_id", "st_name", "st_course", "st_score")
    dataSet4.show()

    // 5) JSON read with a nested schema: "school" is a struct and
    //    "score" is an array of integers.
    val jsonSchema = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("school", DataTypes.createStructType(
        Array(StructField("school", DataTypes.StringType), StructField("time", DataTypes.StringType)))),
      StructField("age", DataTypes.IntegerType),
      StructField("score", DataTypes.createArrayType(DataTypes.IntegerType))
    ))
    val dataJSON = spark.read.schema(jsonSchema).json("src/main/resources/daat.txt")
    dataJSON.printSchema()
    dataJSON.show()

    // spark.stop() shuts down the session AND its underlying SparkContext;
    // the previous sc.stop() only stopped the context.
    spark.stop()
  }

}
