package org.example

import org.apache.spark.sql.{SparkSession, types}
import org.apache.spark.sql.functions.desc
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

/**
 * Spark SQL DataFrame demo: loads the MovieLens-style users.dat file with an
 * explicit schema, then shows filtering, a gender-mapping UDF, projection and
 * ordering on the resulting DataFrame.
 */
object sql3 {
  def main(args: Array[String]): Unit = {
    // Local Spark session using all available cores.
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // Explicit schema for the "::"-separated users.dat file.
    val schemaUser = StructType(Seq(
      StructField("id", IntegerType),
      StructField("gender", StringType),
      StructField("age", IntegerType),
      StructField("occupation", IntegerType),
      StructField("location", StringType)
    ))
    val user = spark.read.option("sep", "::").schema(schemaUser)
      .csv("src/main/resources/users.dat")
    user.show(5)

    // Filtering: where and filter are equivalent DataFrame APIs.
    user.where("gender = 'F' and age = 18").show(3)
    user.filter("gender = 'F' and age = 18").show(3)

    // UDF mapping the gender code to an integer.
    // The default case prevents a MatchError when the column holds an
    // unexpected or null value (the original match was non-exhaustive).
    spark.udf.register("replace", (x: String) =>
      x match {
        case "M" => 0
        case "F" => 1
        case _   => -1 // unknown or null gender
      }
    )
    user.selectExpr("id", "replace(gender) as sexual", "age").show(3)
    user.select("id", "age", "location").show(3)

    // Ordering: orderBy(desc(...)) and sort on a negated column are equivalent.
    user.orderBy(desc("age")).show(5)
    user.sort(-user("id")).show(5)

    // TODO: grouping/aggregation examples.

    // spark.stop() shuts down the session and its underlying SparkContext.
    spark.stop()
  }
}