package org.example

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

object spark_zuoye {
  /** Entry point: loads MovieLens `users.dat` and `ratings.dat`, joins them on
    * `UserID`, and prints the distinct IDs of movies that 18-year-old female
    * users rated 5 stars.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Local session; `local[*]` uses all available cores on this machine.
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    // Read user data; fields are `::`-separated: UserID::gender::age::occupation::zipcode.
    val users = spark.read
      .option("sep", "::")
      .option("inferSchema", "true")
      .csv("src/main/resources/users.dat")
      .toDF("UserID", "gender", "age", "occupation", "zipcode")
    // Read rating data; fields are `::`-separated: UserID::movieID::rating::timestamp.
    val ratings = spark.read
      .option("sep", "::")
      .option("inferSchema", "true")
      .csv("src/main/resources/ratings.dat")
      .toDF("UserID", "movieID", "rating", "timestamp")
    // Query: distinct IDs of movies rated 5 by 18-year-old female users.
    val result = users.join(ratings, "UserID")
      .filter(col("gender") === "F" && col("age") === 18 && col("rating") === 5)
      .select("movieID")
      .distinct()
    result.show()
    // Stop the session itself (this also stops the underlying SparkContext);
    // previously a SparkContext handle was obtained only to call stop() on it.
    spark.stop()
  }
}
