package com.analysis

// Top 300 hot keywords in product descriptions
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object TopKeywordsInDescription {

  /** Computes the 300 most frequent words (longer than two characters) in the
    * `Description` column of the cleaned data set and writes the result as JSON.
    *
    * Optional command-line arguments (defaults preserve the original behavior):
    *   - args(0): input parquet path  (default "output/cleaned_data.parquet")
    *   - args(1): output directory    (default "output/analysis_results/top_keywords_in_description")
    */
  def main(args: Array[String]): Unit = {
    // Allow paths to be overridden from the command line; `lift` returns None
    // when the index is absent, so running with no args is unchanged.
    val inputPath  = args.lift(0).getOrElse("output/cleaned_data.parquet")
    val outputPath = args.lift(1).getOrElse("output/analysis_results/top_keywords_in_description")

    val spark = SparkSession.builder()
      .appName("Top 300 Keywords in Product Description")
      .master("local[*]")
      .getOrCreate()

    try {
      val cleanedDF = spark.read.parquet(inputPath)

      // Lower-case the description, tokenize on runs of non-word characters,
      // drop short tokens (<= 2 chars, which also removes the empty strings
      // produced by leading separators), then rank words by frequency.
      val resultDF = cleanedDF
        .select(explode(split(lower(col("Description")), "\\W+")).as("word"))
        .filter(length(col("word")) > 2)
        .groupBy("word")
        .count()
        .orderBy(desc("count"))
        .limit(300)

      resultDF.write
        .mode("overwrite")
        .json(outputPath)

      resultDF.show()
    } finally {
      // Release the SparkSession even if reading, transforming, or writing fails.
      spark.stop()
    }
  }
}