package org.example

import org.apache.spark.sql.{SparkSession, types}
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}

object data1_SQL2 {

  /**
   * Demo of reading structured data with Spark SQL:
   * 1. CSV with no schema (all columns inferred as strings, names `_c0`...),
   * 2. CSV with an explicit [[StructType]] schema,
   * 3. CSV with a header row (and renaming columns via `toDF`),
   * 4. JSON with a nested struct and an array column.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // 1. Schema-less read: columns come back as nullable strings with
    // generated names (_c0, _c1, ...).
    val dataSet1 = spark.read.csv("src/main/resources/data.txt")
    dataSet1.printSchema()
    dataSet1.show()

    // 2. Explicit schema: define column names and types up front; useful
    // when the data file itself carries no structure information.
    val schema1 = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("course", DataTypes.StringType),
      StructField("score", DataTypes.IntegerType)
    ))
    val dataSet2 = spark.read.schema(schema1).csv("src/main/resources/data.txt")
    dataSet2.printSchema()
    dataSet2.show()

    // 3. Header row: if the file's first line holds column names, read it
    // with option("header", "true").
    val dataSet3 = spark.read.option("header", "true").csv("src/main/resources/data.txt")
    dataSet3.printSchema()
    dataSet3.show()

    // If the header names are unsuitable (e.g. Chinese), alias the columns.
    val dataSet4 = dataSet3.toDF("st_id", "st_name", "st_course", "st_score")
    dataSet4.show()

    // 4. JSON read with a schema containing a nested struct ("school")
    // and an integer-array column ("score").
    val jsonSchema = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("school", DataTypes.createStructType(
        Array(
          StructField("schoolName", DataTypes.StringType),
          StructField("time", DataTypes.StringType)))),
      StructField("age", DataTypes.IntegerType),
      StructField("score", DataTypes.createArrayType(DataTypes.IntegerType))
    ))
    val dataJSON = spark.read.schema(jsonSchema).json("src/main/resources/json.txt")
    dataJSON.printSchema()
    dataJSON.show()

    // Stop the session itself rather than only its SparkContext; this also
    // shuts down the underlying context.
    spark.stop()
  }
}
