package org.example

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

object SQL {
  /**
   * Demo entry point: reads a CSV score file and a JSON student file with
   * explicit schemas, then runs a Spark SQL aggregation over the JSON data.
   */
  def main(args: Array[String]): Unit = {
    // Create the Spark session (local mode, all available cores)
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext
    // RDD read
    //  val scoreRDD = sc.textFile("src/main/resources/score.txt")
    //  scoreRDD.foreach(println)
    // SQL read — textFile can only produce a single (string) column
    //  val scoreSQL = spark.read.textFile("src/main/resources/score.txt")

    // Print the data schema (column names and types)
    //  scoreFrame.printSchema()
    // Inspect the data
    // scoreFrame.show()

    // Table structure (column names) and data types for the score file
    val schemaScore = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true),
      StructField("course", StringType, nullable = true),
      StructField("score", IntegerType, nullable = true)
    ))
    // FIX: schemaScore was defined but never applied, leaving it dead code and
    // every CSV column typed as a string named _c0.._c3. Applying it gives
    // typed, properly named columns (id/name/course/score).
    val scoreFrame = spark.read.schema(schemaScore).csv("src/main/resources/score.txt")

    // Rename the columns
    val scoreNewName = scoreFrame.toDF("stu_id", "stu_name", "stu_course", "stu_score")
    // Print the data schema
    //  scoreFrame.printSchema()
    // Inspect the data
    //  scoreFrame.show()
    // scoreNewName.show()

    // Read JSON-formatted data and query it with SQL
    val schema = StructType(Seq(
      StructField("id", IntegerType, nullable = false),   // student id
      StructField("name", StringType, nullable = true),   // name
      StructField("age", IntegerType, nullable = true),   // age (original comment wrongly said "course")
      StructField("school", DataTypes.createStructType(Array(
        StructField("schoolname", StringType),            // school name
        StructField("time", StringType)                   // enrollment/graduation time — TODO confirm semantics
      ))),
      StructField("score", DataTypes.createArrayType(IntegerType), nullable = true) // scores
    ))
    val data = spark.read.schema(schema).json("src/main/resources/json.txt")
    data.printSchema()
    data.show()
    // SQL: average age of students from the "清职院" school
    data.createOrReplaceTempView("json")
    val res1 = spark.sql(
      """
        |select
        | avg(age) as avg_age
        |from json
        |where school.schoolname = '清职院'
        |""".stripMargin)
    res1.show()
    // Stop the session (also stops the underlying SparkContext) instead of
    // reaching into sc directly.
    spark.stop()
  }
}
  //  Create a temporary view
  /*  scoreFrame.createTempView("score")
  //  SQL: max, total, and average score per course
    val res1 = spark.sql(
      """
        |select
        | _c2 as course,
        | max(_c3) as max_score,
        | sum(_c3) as sum_score,
        | avg(_c3) as avg_score
        | from score
        | group by _c2
        |""".stripMargin)
    res1.show()
  // Names of students scoring above 90
    val res2 = spark.sql(
      """
        |select
        | _c1 as name
        | from score
        | where _c3 > 90
        |""".stripMargin)
    res2.show()*/
  //
   /* val scoreYun2 = spark.read
      .option("encoding","GBK")
      .option("header","true")
      .csv("src/main/resources/23yun2.csv")
    scoreYun2.show()

    scoreYun2.printSchema()
    scoreYun2.createOrReplaceTempView("scoreYun2")
    */


