package org.example
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DataTypes, IntegerType, StringType,
  StructField, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
object sparksql {
  /**
   * Demo entry point showing several ways to read the same data with Spark
   * (plain RDD, Dataset[String], schema-driven CSV, schema-driven JSON with a
   * nested struct and array column) and running a small SQL aggregation.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("sparkBase")
      .getOrCreate()
    val sc = spark.sparkContext

    // Read as a plain RDD of lines (lazy; no action is triggered here).
    val studentRDD: RDD[String] = sc.textFile("src/main/resources/studen.txt")
    // studentRDD.foreach(println)

    // Read via the SQL API: textFile yields a Dataset with a single string column.
    val studentSQL: Dataset[String] =
    spark.read.textFile("src/main/resources/studen.txt")
    // Print the data schema (column names and types)
    // studentSQL.printSchema()
    //// Inspect the data
    // studentSQL.show()

    // Explicit schema for the CSV score file — avoids inference and keeps
    // column types stable across runs.
    val schemaScore = StructType(Seq(
      StructField("id",IntegerType,nullable = true),
      StructField("username",StringType),
      StructField("course",StringType),
      StructField("score",IntegerType)
    ))
    val studentFrame: DataFrame =
      spark.read.schema(schemaScore).csv("src/main/resources/studen.txt")
    studentFrame.printSchema()
    studentFrame.show()

    // 1. Define the table structure (column names) and types, including a
    //    nested struct ("school") and an array column ("score").
    val schemaJson = StructType(Seq(
      StructField("id",IntegerType,nullable = true),
      StructField("name",StringType),
      StructField("school",DataTypes.createStructType(
        Array(StructField("schoolName", StringType),StructField("time",StringType)))),
      StructField("age",IntegerType),
      StructField("score",DataTypes.createArrayType(IntegerType))
    ))

    // 2. Read the JSON data and run a SQL query against it.
    val scoreJson = spark.read.schema(schemaJson).json("src/main/resources/json.txt")
    scoreJson.printSchema()
    scoreJson.show()
    // createOrReplaceTempView is idempotent; plain createTempView throws an
    // AnalysisException if a view named "sj" already exists in this session.
    scoreJson.createOrReplaceTempView("sj")
    val res5 = spark.sql(
      """
        |select
        | school.schoolName as school_name,
        | avg(age) as avg_age
        | from sj
        | group by school.schoolName
        |""".stripMargin
    )
    res5.show()

    // Video data scraped from the web; filter for uploaders whose videos
    // exceed 100 million plays. (Not implemented yet.)

    // Stop the whole SparkSession (not just the SparkContext) so that
    // session-scoped state and listeners are released as well.
    spark.stop()
  }
}

