package com.leal.client

import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

//import org.apache.spark.sql.{DataFrame, SparkSession}
//import scala.concurrent.{ExecutionContext, Future}

/**
 * Example of running several independent Spark SQL aggregations
 * concurrently from a single SparkSession using Scala `Future`s.
 *
 * @author leal123
 * @date 2023/3/30 20:59
 */
object SparkMultiThreadExample {

  /**
   * Entry point. Loads a CSV of sales, registers it as a temp view, fires
   * three independent SQL aggregations concurrently via `Future`, waits for
   * all of them to finish, prints the combined rows, and shuts Spark down.
   *
   * Note: sharing one SparkSession across threads is supported; each query
   * runs as a separate job on Spark's scheduler.
   */
  def main(args: Array[String]): Unit = {

    // Create the SparkSession (local mode, using all available cores).
    val spark = SparkSession.builder()
      .appName("SparkMultiThreadSqlExample")
      .master("local[*]")
      .getOrCreate()

    // Load the sales data; the header row supplies column names and the
    // schema is inferred from the values.
    val sales: DataFrame = spark.read.format("csv")
      .option("header", "true")
      .option("inferSchema", "true")
      .load("E://100-Data//spark//sales.csv")
    println("程序开始运行")
    sales.show()
    sales.createOrReplaceTempView("sales")

    // Define the concurrent tasks. Each Future starts executing as soon as
    // it is constructed, so all three queries run in parallel from here.
    val futures = Seq(
      Future {
        // Revenue and quantity per (customer, product) pair.
        spark.sql(
          """
            |SELECT customer_id, product_id, SUM(quantity) AS total_quantity, SUM(quantity * price) AS total_revenue
            |FROM sales
            |GROUP BY customer_id, product_id
         """.stripMargin).collect()
      },
      Future {
        // Revenue and quantity per product.
        spark.sql(
          """
            |SELECT product_id, SUM(quantity) AS total_quantity, SUM(quantity * price) AS total_revenue
            |FROM sales
            |GROUP BY product_id
         """.stripMargin).collect()
      },
      Future {
        // Revenue per sale date.
        spark.sql(
          """
            |SELECT sale_date, SUM(quantity * price) AS total_revenue
            |FROM sales
            |GROUP BY sale_date
         """.stripMargin).collect()
      }
    )

    // BUG FIX: the original code never awaited the futures, so spark.stop()
    // could run before (or while) the queries executed, and the results were
    // only attached as callbacks on an un-awaited Future (printing the Future
    // object itself, not the rows). Blocking with Await is acceptable here
    // because this is the very edge of the program.
    val allRows = Await.result(Future.sequence(futures), Duration.Inf).flatten

    // Print every result row (Row.toString), then a one-line summary.
    allRows.foreach(println)
    println("结果为: " + allRows.mkString(", "))

    // All work is complete; release the SparkSession.
    spark.stop()
  }


}
