package com.hucais.agg.service

import com.hucais.agg.bean.{BookBaseInfoExt, DwtChannelDimMetricDetail}
import com.hucais.agg.dao.SourceIndexDataDao
import com.hucais.core.utils.DefaultPropertiesUtil
import org.apache.spark.SparkContext
import org.apache.spark.sql.{Dataset, SparkSession}
import org.elasticsearch.spark.sql.EsSparkSQL

/**
 * Computes channel-dimension metric details from the book base info and
 * shelf-product temp views and writes each metric set to Elasticsearch.
 *
 * All result rows share the shape of [[DwtChannelDimMetricDetail]]:
 * (channel, metric_type, key, value, total_val, create_time), where
 * create_time is always yesterday's date (yyyy-MM-dd).
 */
object ChannelDimMetricDetailService {

  /** Property key of the target ES index; looked up once per write via DefaultPropertiesUtil. */
  private val MetricDetailIndexKey = "index.channel.dim.metric.detail"

  /**
   * Entry point: registers the source temp views and emits all metrics.
   *
   * @param ssc          Spark context; currently unused, kept for caller compatibility.
   * @param sparkSession active session used for SQL and ES output.
   */
  def action(ssc: SparkContext, sparkSession: SparkSession): Unit = {
    // Load book_base_info data and expose it as a temp view; cached because
    // every metric query below scans it.
    val bookBaseInfoDS: Dataset[BookBaseInfoExt] = SourceIndexDataDao.getSearchInfoDataExt(sparkSession)
    bookBaseInfoDS.cache()
    bookBaseInfoDS.createOrReplaceTempView("dwd_book_base_info")
    // Load shelf-product data (used by the "resources" metric).
    val shelfProductDS = SourceIndexDataDao.getShelfProduct(sparkSession)
    shelfProductDS.cache()
    shelfProductDS.createOrReplaceTempView("ods_shelf_products")
    // Compute and write all aggregated metrics.
    outPutAggData(sparkSession)
    // All ES writes above are actions, so the cached data is no longer needed.
    bookBaseInfoDS.unpersist()
    shelfProductDS.unpersist()
  }

  /**
   * Writes every metric set in turn.
   *
   * @param sparkSession active session.
   */
  private def outPutAggData(sparkSession: SparkSession): Unit = {
    outPutPress(sparkSession)
    outPutCategorys(sparkSession)
    outPutResources(sparkSession)
    outPutPremiumCategorys(sparkSession)
    outPutMultiple(sparkSession)
    outPutPremiumProduct(sparkSession)
  }

  /**
   * Shared query for "top-N share per channel" metrics: counts rows of
   * dwd_book_base_info grouped by (channel, keyColumn) plus an all-channel
   * rollup (channel NULL -> '全部' via GROUPING SETS + nvl), computes the
   * per-channel total via a window sum, and keeps the top N keys per channel.
   *
   * @param sparkSession active session.
   * @param metricType   value written to the metric_type column.
   * @param keyColumn    column of dwd_book_base_info used as the metric key.
   * @param extraFilter  optional additional predicate appended to the WHERE
   *                     clause (must start with "and ..." when non-empty).
   * @param topN         number of keys kept per channel (default 10).
   */
  private def saveTopNRatioMetric(sparkSession: SparkSession,
                                  metricType: String,
                                  keyColumn: String,
                                  extraFilter: String = "",
                                  topN: Int = 10): Unit = {
    import sparkSession.implicits._
    val resultDS = sparkSession.sql(
      s"""
         |select a.channel,'$metricType' as metric_type,a.$keyColumn as key,a.value,a.total_val,date_format(date_add(current_date,-1),'yyyy-MM-dd') as create_time
         |from
         |(
         |	select a.channel,a.$keyColumn,a.value,sum(a.value) over(partition by channel) as total_val,
         |	row_number() over(partition by channel order by value desc) rank
         |	from
         |	(
         |		select nvl(channel,'全部') as channel,$keyColumn,count(*) as value
         |		from dwd_book_base_info
         |		where $keyColumn is not null $extraFilter
         |		GROUP by channel,$keyColumn
         |		grouping sets ((channel,$keyColumn),($keyColumn))
         |	)a
         |)a where a.rank<=$topN
         |""".stripMargin).as[DwtChannelDimMetricDetail]
    EsSparkSQL.saveToEs(resultDS, DefaultPropertiesUtil.get(MetricDetailIndexKey))
  }

  /**
   * Shared query for "distribution per channel" metrics: counts rows of
   * dwd_book_base_info grouped by (channel, keyColumn) plus an all-channel
   * rollup, with the per-channel total as a window sum (no top-N cut, no
   * NULL-key filter).
   *
   * @param sparkSession active session.
   * @param metricType   value written to the metric_type column.
   * @param keyColumn    column of dwd_book_base_info used as the metric key.
   */
  private def saveDistributionMetric(sparkSession: SparkSession,
                                     metricType: String,
                                     keyColumn: String): Unit = {
    import sparkSession.implicits._
    val resultDS = sparkSession.sql(
      s"""
         |select a.channel,'$metricType' as metric_type,a.$keyColumn as key,a.value,sum(a.value) over(partition by channel) as total_val,date_format(date_add(current_date,-1),'yyyy-MM-dd') as create_time
         |from
         |(
         |	select nvl(channel,'全部') as channel,$keyColumn,count(*) as value
         |	from dwd_book_base_info
         |	GROUP by channel,$keyColumn
         |	grouping sets ((channel,$keyColumn),($keyColumn))
         |)a
         |""".stripMargin).as[DwtChannelDimMetricDetail]
    EsSparkSQL.saveToEs(resultDS, DefaultPropertiesUtil.get(MetricDetailIndexKey))
  }

  /**
   * Outputs the "publisher output product count share" metric: top-10
   * publishers per channel by product count.
   *
   * @param sparkSession active session.
   */
  private def outPutPress(sparkSession: SparkSession): Unit =
    saveTopNRatioMetric(sparkSession, "press", "publishing_house")

  /**
   * Outputs the "product count per category" metric: top-10 categories per
   * channel by product count.
   *
   * @param sparkSession active session.
   */
  private def outPutCategorys(sparkSession: SparkSession): Unit =
    saveTopNRatioMetric(sparkSession, "categorys", "category")

  /**
   * Outputs the "channel resource count" metric: for every channel in
   * ods_shelf_products, the shelf-product count (value) alongside the
   * dwd_book_base_info count (total_val, 0 when the channel has no rows).
   * This query joins two views with different grain, so it is kept inline
   * rather than routed through a shared helper.
   *
   * @param sparkSession active session.
   */
  private def outPutResources(sparkSession: SparkSession): Unit = {
    import sparkSession.implicits._
    val resultDS = sparkSession.sql(
      s"""
         |select '无' as channel,'resources' as metric_type,a.channel as key,
         |a.resources as value,if(b.resources is null,0,b.resources) as total_val ,date_format(date_add(current_date,-1),'yyyy-MM-dd') as create_time
         |from
         |(
         |	select channel,count(*) as resources from ods_shelf_products group by channel
         |) a
         |left join
         |(
         |	select channel,COUNT(*) as resources from dwd_book_base_info group by channel
         |) b
         |on a.channel=b.channel
         |""".stripMargin).as[DwtChannelDimMetricDetail]
    EsSparkSQL.saveToEs(resultDS, DefaultPropertiesUtil.get(MetricDetailIndexKey))
  }

  /**
   * Outputs the "premium product category share" metric: top-10 categories
   * per channel, restricted to premium ('溢价') products.
   *
   * @param sparkSession active session.
   */
  private def outPutPremiumCategorys(sparkSession: SparkSession): Unit =
    saveTopNRatioMetric(sparkSession, "premium-categorys", "category",
      extraFilter = "and premium_type='溢价'")

  /**
   * Outputs the "premium multiple ratio" metric: product-count distribution
   * over premium_multiple per channel.
   *
   * @param sparkSession active session.
   */
  private def outPutMultiple(sparkSession: SparkSession): Unit =
    saveDistributionMetric(sparkSession, "multiple", "premium_multiple")

  /**
   * Outputs the "premium product count" metric: product-count distribution
   * over premium_type per channel.
   *
   * @param sparkSession active session.
   */
  private def outPutPremiumProduct(sparkSession: SparkSession): Unit =
    saveDistributionMetric(sparkSession, "premium-product", "premium_type")

}
