package com.niit.spark.sql.test



import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions.sum
/**
 * Date: 2025/5/16
 * Author: Ys
 * Description: Demonstrates filtering and aggregating sales data with Spark SQL.
 */
object FilterAggregateSales {

  /**
   * Reads sales data from a CSV file and demonstrates two aggregations:
   *   1. the total sales amount for a single region ("East")
   *   2. the total sales amount grouped by region
   *
   * Expects `input/sql/sales2.csv` to contain at least the columns
   * `region` and `sales_amount` with a header row.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .appName("FilterAggregateSales")
      .master("local[*]")
      .getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    // inferSchema makes sales_amount a numeric column; without it every CSV
    // column is a string and sum() relies on an implicit string->double cast.
    val df: DataFrame = spark.read
      .option("header", "true")
      .option("inferSchema", "true")
      .csv("input/sql/sales2.csv")

    // Total sales amount for one specific region.
    df.filter("region = 'East'")
      .agg(sum("sales_amount").as("total_sales"))
      .show()

    // Total sales amount per region.
    df.groupBy("region")
      .agg(sum("sales_amount").as("total_sales"))
      .show()

    spark.stop()
  }

}
