package test

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{asc, desc}
import org.apache.spark.sql.types._

object ii {
  /**
   * Demo entry point: reads JSON with an explicit schema and queries it via
   * Spark SQL, then reads "::"-separated movie-user data and exercises the
   * basic DataFrame operations (where/filter, orderBy/sort, groupBy).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("ii") // explicit app name instead of the auto-generated UUID
      .master("local[*]")
      .getOrCreate()

    // Read JSON data: 1. define the table schema explicitly so Spark does not
    // have to infer it (and so `id` is enforced as non-nullable).
    val schemaJson = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType),
      StructField("school", DataTypes.createStructType(
        Array(
          StructField("schoolName", StringType),
          StructField("time", StringType)))),
      StructField("age", IntegerType),
      StructField("score", DataTypes.createArrayType(IntegerType))
    ))
    val studentJson = spark.read.schema(schemaJson).json("src/main/resources/json.txt")
    studentJson.printSchema()
    studentJson.show()
    // createOrReplaceTempView avoids AnalysisException when the view already exists.
    studentJson.createOrReplaceTempView("student")
    val result = spark.sql(
      """
        |select
        | school.schoolName as school_name,
        | avg(age) as avg_age
        | from student
        | group by school.schoolName
        |""".stripMargin
    )
    result.show()

    // Read movie-user data ("::"-separated fields) with an explicit schema.
    val schemaUser = StructType(Array(
      StructField("id", IntegerType),
      StructField("gender", StringType),
      StructField("age", IntegerType),
      StructField("occupation", IntegerType),
      StructField("zipcode", StringType)
    ))
    val movieUser = spark.read
      .option("sep", "::")
      .schema(schemaUser)
      .csv("src/main/resources/users.dat")
    movieUser.show(3)

    // Filtering: where and filter are equivalent aliases.
    val userWhere = movieUser.where("gender = 'F' and age = 18")
    movieUser.filter("gender = 'F' and age = 18").show()
    userWhere.show(10)

    // Sorting: orderBy and sort are equivalent aliases.
    movieUser.orderBy(desc("id")).show(5)
    movieUser.sort(asc("id")).show()

    // Grouping.
    movieUser.groupBy("gender").count().show()

    // Stopping the session also stops the underlying SparkContext, so a
    // separate `sc.stop()` (and the unused `sc` val) is unnecessary.
    spark.stop()
  }
}
