package com.supermarket.spark.recommender

import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.ml.recommendation.ALS
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}

import java.util.Properties

object RecommenderSystem {

  /**
   * Batch recommender job: reads Flume-delivered TSV purchase logs from HDFS,
   * trains an ALS collaborative-filtering model, and writes per-user and
   * per-product top-6 recommendations to MySQL.
   *
   * Optional args (defaults preserve the original hard-coded connection, so
   * invoking with no arguments is fully backward compatible):
   *   args(0) = JDBC URL, args(1) = DB user, args(2) = DB password
   */
  def main(args: Array[String]): Unit = {
    // Required for winutils when running Spark locally on Windows.
    System.setProperty("hadoop.home.dir", "D:\\tmp\\hadoop")

    val spark = SparkSession.builder
      .appName("Supermarket Collaborative Filtering")
      .master("local[*]")
      .getOrCreate()

    import spark.implicits._

    // Schema of the raw tab-separated records written by Flume onto HDFS.
    val schema = new StructType()
      .add("user_id", IntegerType)
      .add("product_id", IntegerType)
      .add("product_name", StringType)
      .add("in_stock", IntegerType)
      .add("favor_level", IntegerType)
      .add("sold", IntegerType)
      .add("status", StringType)

    val rawDF = spark.read
      .option("sep", "\t")
      .schema(schema)
      .csv("hdfs://niit01:8020/user/flume/supermarket00/*")

    // Build (user, item, rating) triples for ALS.
    // FIX: the original used distinct(), which still leaves conflicting rows
    // when the same (user, item) pair appears with different favor_level
    // values; average them so each pair contributes exactly one rating.
    // Cached because it is consumed by fit() below.
    val ratingsDF = rawDF
      .na.drop(Seq("user_id", "product_id", "favor_level"))
      .groupBy(
        col("user_id").cast("int").alias("user"),
        col("product_id").cast("int").alias("item")
      )
      .agg(avg(col("favor_level").cast("double")).alias("rating"))
      .cache()

    // Train the ALS model. "drop" discards NaN predictions for users/items
    // that were unseen at training time (cold-start handling).
    val als = new ALS()
      .setUserCol("user")
      .setItemCol("item")
      .setRatingCol("rating")
      .setColdStartStrategy("drop")

    val model = als.fit(ratingsDF)

    // a. User-based CF: top 6 products per user.
    val userRecs = model.recommendForAllUsers(6)
      .withColumnRenamed("user", "user_id")

    userRecs.show(false)

    // b. Item-based CF: top 6 users per product.
    val itemRecs = model.recommendForAllItems(6)
      .withColumnRenamed("item", "product_id")

    itemRecs.show(false)

    // Flatten the recommendations array<struct<id, rating>> into a
    // comma-separated id string for relational storage.
    // FIX: extract fields by name instead of ordinal position so an upstream
    // column reorder cannot silently corrupt the output.
    val userRecsFormatted = userRecs.map { row =>
      val userId = row.getAs[Int]("user_id")
      val items = row.getAs[Seq[Row]]("recommendations").map(_.getInt(0)).mkString(",")
      (userId, items)
    }.toDF("user_id", "recommended_items")

    val itemRecsFormatted = itemRecs.map { row =>
      val productId = row.getAs[Int]("product_id")
      val users = row.getAs[Seq[Row]]("recommendations").map(_.getInt(0)).mkString(",")
      (productId, users)
    }.toDF("product_id", "recommended_userid")

    // MySQL connection. FIX: credentials were hard-coded only; they can now
    // be overridden from the command line, with the old values as defaults.
    // NOTE(review): credentials in source are a security risk — prefer a
    // secrets manager or environment variables in production.
    val jdbcUrl =
      if (args.length > 0) args(0) else "jdbc:mysql://43.140.205.103:3306/supermarket"
    val connectionProperties = new Properties()
    connectionProperties.put("user", if (args.length > 1) args(1) else "supermarket")
    connectionProperties.put("password", if (args.length > 2) args(2) else "a7NrdbX8hiAZ8Nxb")
    connectionProperties.put("driver", "com.mysql.cj.jdbc.Driver")

    // c. Persist user recommendations (overwrite drops and recreates the table).
    userRecsFormatted.write
      .mode("overwrite")
      .jdbc(jdbcUrl, "user_recommendations", connectionProperties)

    // d. Persist item recommendations.
    itemRecsFormatted.write
      .mode("overwrite")
      .jdbc(jdbcUrl, "item_recommendations", connectionProperties)

    spark.stop()
  }
}