package DianShang_2024.ds_server.indicator

import org.apache.spark.sql.SparkSession

object trait06 {
  /**
   * Indicator task #6: from the DWD layer fact table, compute the TOP-10
   * products by sales quantity and the TOP-10 products by sales revenue,
   * pair them row-by-row on their rank, and append the result to the
   * ClickHouse table `shtd_result.topten`.
   *
   * Pipeline:
   *   1. Rank products by total quantity sold  -> temp view `product_amount`
   *   2. Rank products by total revenue        -> temp view `product_money`
   *   3. Join the two rankings on rank, keeping only ranks 1..10
   *   4. Write the joined result to ClickHouse via JDBC
   */
  def main(args: Array[String]): Unit = {
    // Spark session with Hive support so the dwd_server.* tables resolve.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第六题")
      .enableHiveSupport()
      .getOrCreate()

    // Step 1: total quantity sold per product, ranked descending.
    // row_number() gives each product a dense 1..N rank with no gaps,
    // which is what the rank-equality join below relies on.
    spark.sql(
      """
        |select
        |product_id,
        |product_name,
        |product_amount,
        |row_number() over(order by product_amount desc) as row_number
        |from(
        |select
        |product_id,
        |product_name,
        |sum(product_cnt) as product_amount
        |from dwd_server.fact_order_detail
        |group by product_id,product_name
        |) as t1
        |""".stripMargin).createOrReplaceTempView("product_amount")

    spark.sql("select * from product_amount").show

    // Step 2: total revenue per product (quantity * unit price per line,
    // then summed per product), ranked descending.
    spark.sql(
      """
        |select
        |product_id,
        |product_name,
        |SellOut_money,
        |row_number() over(order by SellOut_money desc) as row_number
        |from(
        |select
        |product_id,
        |product_name,
        |sum(number) as SellOut_money
        |from(
        |select
        |product_id as product_id,
        |product_name as product_name,
        |(product_cnt * product_price) as number
        |from dwd_server.fact_order_detail
        |) as t1
        |group by product_id,product_name
        |) as t2
        |""".stripMargin).createOrReplaceTempView("product_money")

    spark.sql("select * from product_money limit 20").show

    // Step 3: pair the two rankings by rank. The requirement is the TOP 10
    // only, so filter to row_number <= 10 — without this filter every ranked
    // product would be joined and appended to the target table.
    // Filtering t1 is sufficient because the join is on rank equality.
    val result_data = spark.sql(
      """
        |select
        |t1.row_number as row_number,
        |t1.product_id as product_id_amount,
        |t1.product_name as product_name_amount,
        |t1.product_amount as product_amount,
        |t2.product_id as product_id_money,
        |t2.product_name as product_name_money,
        |t2.SellOut_money as SellOut_money
        |from product_amount as t1
        |join product_money as t2
        |on t1.row_number=t2.row_number
        |where t1.row_number <= 10
        |""".stripMargin)
    result_data.createOrReplaceTempView("result_table")

    spark.sql("select * from result_table limit 20").show

    // Step 4: append the top-10 pairing into ClickHouse shtd_result.topten.
    // NOTE(review): column names must match the topten DDL — verify against
    // the target table definition before running.
    result_data.write
      .format("jdbc")
      .option("url", "jdbc:clickhouse://192.168.40.110:8123/shtd_result")
      .option("user", "default")
      .option("password", "")
      .option("driver", "com.clickhouse.jdbc.ClickHouseDriver")
      .option("dbtable", "topten")
      .mode("append")
      .save()

    // Release the Spark session.
    spark.close()
  }

}
