package org.example

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType,DoubleType,LongType}

// NOTE(review): lowercase object name kept for compatibility — callers /
// spark-submit reference the class name "org.example.sql4".
object sql4 {

  /**
   * Entry point. Loads the MovieLens-style `users.dat` / `ratings.dat` /
   * `movies.dat` files ("::"-separated), joins them, and prints:
   *   1. a sample of the fully joined ratings (first 5 rows), and
   *   2. the titles of all movies rated 5 by 18-year-old female users.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    import spark.implicits._

    // Schemas for the "::"-separated .dat files (no header rows).
    val userSchema = StructType(Seq(
      StructField("id", IntegerType),
      StructField("gender", StringType),
      StructField("age", IntegerType),
      StructField("occupation", IntegerType),
      StructField("location", StringType)
    ))

    val ratingSchema = StructType(Seq(
      StructField("userId", IntegerType),
      StructField("movieId", IntegerType),
      StructField("rating", DoubleType),
      StructField("timestamp", LongType)
    ))

    val movieSchema = StructType(Seq(
      StructField("movieId", IntegerType),
      StructField("title", StringType),
      StructField("genres", StringType)
    ))

    // All three files share the same format; factor out the reader pipeline.
    def readDat(path: String, schema: StructType) =
      spark.read.option("sep", "::").schema(schema).csv(path)

    val userDF   = readDat("src/main/resources/users.dat", userSchema)
    val ratingDF = readDat("src/main/resources/ratings.dat", ratingSchema)
    val movieDF  = readDat("src/main/resources/movies.dat", movieSchema)

    // Maps gender codes to ints ("M" -> 0, "F" -> 1).
    // Fix: the original match was non-exhaustive and would throw a
    // scala.MatchError at execution time on any other value (including a
    // null from a malformed row); unknown values now map to -1.
    // NOTE(review): this UDF is registered but never invoked below —
    // confirm whether it is still needed.
    spark.udf.register("replaceGender", (x: String) =>
      x match {
        case "M" => 0
        case "F" => 1
        case _   => -1
      })

    // Full join: each rating enriched with its user's and movie's attributes.
    val joinedDF = ratingDF.join(userDF, ratingDF("userId") === userDF("id"))
      .join(movieDF, ratingDF("movieId") === movieDF("movieId"))

    // Show a sample of the joined result.
    joinedDF.show(5)

    // Titles of every movie given a 5-star rating by an 18-year-old female
    // user. Column names are unique across the joined frames, so the
    // unqualified $"age"/$"gender"/$"rating" references are unambiguous.
    val result = userDF.join(ratingDF, userDF("id") === ratingDF("userId"))
      .filter($"age" === 18 && $"gender" === "F" && $"rating" === 5)
      .join(movieDF, ratingDF("movieId") === movieDF("movieId"))
      .select(movieDF("title"))

    result.show(truncate = false)

    spark.stop()
  }
}