package org.example

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.desc
import org.apache.spark.sql.types.{DataTypes, IntegerType, StringType, StructField, StructType}

/**
 * Demo of Spark SQL DataFrame operations on the MovieLens `users.dat` file:
 * schema-based CSV loading, filtering, a registered UDF, ordering and grouping.
 *
 * Expects `src/main/resources/users.dat` with `::`-separated fields:
 * id, gender, age, occupation, zipcode.
 */
object data1_SQL3 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()

    // Explicit schema: avoids a schema-inference pass and gives typed columns.
    val schemaUser = StructType(Seq(
      StructField("id", IntegerType),
      StructField("gender", StringType),
      StructField("age", IntegerType),
      StructField("occupation", IntegerType),
      StructField("zipcode", StringType)
    ))
    val user = spark.read.option("sep", "::").schema(schemaUser)
      .csv("src/main/resources/users.dat")
    user.show(5)

    // `where` and `filter` are aliases; both shown for comparison.
    user.where("gender = 'F' and age = 18").show(5)
    user.filter("gender = 'F' and age = 18").show(5)

    // Encode gender as an integer: M -> 0, F -> 1.
    // The default case prevents a scala.MatchError (which would fail the whole
    // job) on unexpected or null gender values; such rows map to -1.
    spark.udf.register("replace", (x: String) => {
      x match {
        case "M" => 0
        case "F" => 1
        case _   => -1
      }
    })
    user.selectExpr("id", "replace(gender) as sexual", "age").show(3)

//    user.select(user.col("zipcode")).collect().foreach(println)
//    user.select(user.apply("zipcode")).collect().foreach(println)
//    user.orderBy(desc("id")).show(5)

    // Two equivalent ways to sort descending by id.
    user.orderBy(-user("id")).show(5)
    user.sort(desc("id")).show(5)

    user.groupBy("gender").count().show()

    // Stop the whole session (stops the underlying SparkContext too).
    spark.stop()
  }
}
