package com.analysis

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object MonthlyNewCustomers {
  /** Computes the monthly trend of newly acquired customers from cleaned
    * transaction data and writes the result as JSON.
    *
    * Usage: MonthlyNewCustomers [inputPath] [outputPath]
    *   - inputPath:  parquet source (default: "output/cleaned_data.parquet")
    *   - outputPath: JSON sink (default: "output/analysis_results/monthly_new_customers")
    *
    * Defaults preserve the original hard-coded locations, so existing
    * zero-argument invocations behave identically.
    */
  def main(args: Array[String]): Unit = {
    // Optional CLI overrides for the I/O locations; lift() avoids an
    // ArrayIndexOutOfBoundsException when fewer args are supplied.
    val inputPath  = args.lift(0).getOrElse("output/cleaned_data.parquet")
    val outputPath = args.lift(1).getOrElse("output/analysis_results/monthly_new_customers")

    val spark = SparkSession.builder()
      .appName("Monthly New Customers Trend")
      .master("local[*]")
      .getOrCreate()

    try {
      val cleanedDF = spark.read.parquet(inputPath)

      // Find each customer's first purchase month. The "yyyy-MM" format
      // sorts chronologically under lexicographic comparison, so taking
      // min() of the formatted string is equivalent to formatting the
      // minimum InvoiceDate.
      val firstPurchaseDF = cleanedDF
        .filter(col("CustomerID").isNotNull)
        .withColumn("YearMonth", date_format(col("InvoiceDate"), "yyyy-MM"))
        .groupBy("CustomerID")
        .agg(min("YearMonth").as("FirstPurchaseMonth"))

      // Count first-time customers per month. firstPurchaseDF has exactly
      // one row per CustomerID, so a plain count() never double-counts.
      val resultDF = firstPurchaseDF
        .groupBy("FirstPurchaseMonth")
        .agg(count("CustomerID").as("NewCustomerCount"))
        .orderBy("FirstPurchaseMonth")

      resultDF.write
        .mode("overwrite")
        .json(outputPath)

      resultDF.show()
    } finally {
      // Release the SparkSession even when the job fails mid-way;
      // the original code leaked it on any exception above.
      spark.stop()
    }
  }
}