package cn.pengpeng

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Custom aggregation queries (yearly totals, yearly maxima, quarterly top-10)
  * over the order data held in the sdate / stock / detail DataFrames.
  */
object MyCalculate {

    /**
      * Registers the three source DataFrames as the temp views `sdate`,
      * `stock` and `detail`.
      *
      * Uses `createOrReplaceTempView` (not `createTempView`) so that the
      * public methods of this object can be called more than once — or after
      * each other — in the same SparkSession without an `AnalysisException`
      * for an already-existing view.
      */
    private def registerViews(sdateDF: DataFrame, stockDF: DataFrame, detailDF: DataFrame): Unit = {
        sdateDF.createOrReplaceTempView("sdate")
        stockDF.createOrReplaceTempView("stock")
        detailDF.createOrReplaceTempView("detail")
    }

    /**
      * Computes the total sales amount of all orders for each year and
      * prints the result.
      *
      * @param spark    active SparkSession used to run the SQL
      * @param sdateDF  date dimension (dateID, theyear, ...)
      * @param stockDF  order header (ordernumber, dateID, ...)
      * @param detailDF order lines (ordernumber, amount, ...)
      */
    def eachYearAllAmount(spark: SparkSession, sdateDF: DataFrame, stockDF: DataFrame, detailDF: DataFrame): Unit = {
        registerViews(sdateDF, stockDF, detailDF)
        // sum(m.amount), not count(m.amount): this method reports the yearly
        // TOTAL amount; count would only return the number of detail rows.
        val result: DataFrame = spark.sql(
            """select n.theyear, sum(m.amount)
              |from detail m
              |join (select a.ordernumber, b.theyear
              |      from stock a
              |      join sdate b on a.dateID = b.dateID) n
              |on m.ordernumber = n.ordernumber
              |group by n.theyear""".stripMargin)
        // Print the result to stdout.
        result.show()
    }

    /**
      * Computes, for each year, the sales amount of that year's largest
      * order (the maximum per-order total) and prints the result.
      *
      * @param spark    active SparkSession used to run the SQL
      * @param sdateDF  date dimension (dateID, theyear, ...)
      * @param stockDF  order header (ordernumber, dateID, ...)
      * @param detailDF order lines (ordernumber, amount, ...)
      */
    def eachYearMaxAmount(spark: SparkSession, sdateDF: DataFrame, stockDF: DataFrame, detailDF: DataFrame): Unit = {
        registerViews(sdateDF, stockDF, detailDF)
        // 1) total each order by summing its detail amounts,
        // 2) join the order with its year via the stock/sdate tables,
        // 3) take the maximum order total per year.
        val result = spark.sql(
            """select c.theyear, max(b.total)
              |from stock a
              |join (select ordernumber, sum(amount) total
              |      from detail
              |      group by ordernumber) b
              |on a.ordernumber = b.ordernumber
              |join sdate c on a.dateID = c.dateID
              |group by c.theyear""".stripMargin)
        // Print the result to stdout.
        result.show()
    }

    /**
      * Ranks order detail rows by amount within each quarter and prints the
      * top 10 rows per quarter.
      *
      * NOTE(review): the ranking is over individual detail-row amounts, not
      * per-order totals — if "quarterly top-10 sales" should rank whole
      * orders, the detail amounts would need to be summed per ordernumber
      * first; confirm the intended semantics.
      *
      * @param spark    active SparkSession used to run the SQL
      * @param sdateDF  date dimension (dateID, thequot, ...)
      * @param stockDF  order header (ordernumber, dateID, ...)
      * @param detailDF order lines (ordernumber, amount, ...)
      */
    def eachQuotUpperTen(spark: SparkSession, sdateDF: DataFrame, stockDF: DataFrame, detailDF: DataFrame): Unit = {
        registerViews(sdateDF, stockDF, detailDF)
        // Rank rows within each quarter by amount, descending.
        val ranked: DataFrame = spark.sql(
            """select n.thequot, m.ordernumber, m.amount,
              |       row_number() over(partition by n.thequot order by m.amount desc) rk
              |from detail m
              |join (select a.ordernumber, b.thequot
              |      from stock a
              |      join sdate b on a.dateID = b.dateID) n
              |on m.ordernumber = n.ordernumber""".stripMargin)
        ranked.createOrReplaceTempView("temp")
        // Keep only the ten highest-ranked rows of each quarter.
        val result = spark.sql("select * from temp where rk < 11")
        // Print the result to stdout.
        result.show()
    }
}
