package com.hucais.agg.service

import com.hucais.agg.bean.DwtCategorysDetail
import com.hucais.agg.dao.SourceIndexDataDao
import com.hucais.core.utils.DefaultPropertiesUtil
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.sql.EsSparkSQL

/**
 * Aggregates per-channel category/publisher resource counts from book base info
 * and OpenBook data, then writes the result to Elasticsearch.
 */
object CategorysDetailService {

  /**
   * Runs the aggregation pipeline and persists the result to the ES index
   * configured under `index.categorys.detail`.
   *
   * @param ssc          Spark context (kept for interface compatibility; unused here)
   * @param sparkSession active SparkSession used for SQL and Dataset operations
   */
  def action(ssc: SparkContext, sparkSession: SparkSession): Unit = {
    // Load book_base_info data; cached because it backs the temp view used below.
    val bookBaseInfoDS = SourceIndexDataDao.getSearchInfoData(sparkSession)
    bookBaseInfoDS.cache()
    bookBaseInfoDS.createOrReplaceTempView("dwd_book_base_info")

    // Load OpenBook data; cached for the same reason.
    val openBookDS = SourceIndexDataDao.getOpenData(sparkSession)
    openBookDS.cache()
    openBookDS.createOrReplaceTempView("ods_openbooks")

    import sparkSession.implicits._

    // Union of per-(channel, category, publishing_house) resource counts from both
    // sources; rn=1 deduplicates window rows so each combination appears once.
    // NOTE: no interpolation needed, so the plain """ literal is used — a stray `$`
    // in SQL would otherwise be treated as an interpolation start.
    val tmpInfoDF = sparkSession.sql(
      """
         |select
         |		channel,category as product_type,category_num as product_type_resources,
         |		publishing_house,publishing_house_num as publishing_house_resources
         |	from
         |	(
         |		select
         |			second_channel as channel,
         |			category,count(category) over(partition by second_channel,category order by category desc) as category_num,
         |			publishing_house,count(publishing_house) over(partition by second_channel,category,publishing_house) as publishing_house_num,
         |			row_number() over(partition by second_channel,category,publishing_house order by 1) rn
         |		from dwd_book_base_info
         |		where second_channel is not null and category is not null and publishing_house is not null
         |	)a where a.rn=1
         |	union all
         |	select
         |		channel,category as product_type,category_num as product_type_resources,
         |		publishing_house,publishing_house_num as publishing_house_resources
         |	from
         |	(
         |		select
         |			'开卷数据' as channel,
         |			category,count(category) over(partition by category order by 1) as category_num,
         |			publishing_house,count(publishing_house) over(partition by category,publishing_house order by 1) as publishing_house_num,
         |			row_number() over(partition by category,publishing_house order by 1) rn
         |	from ods_openbooks
         |	)a where a.rn=1
         |""".stripMargin)
    tmpInfoDF.createOrReplaceTempView("tmp_info")

    // Roll up with GROUPING SETS: one set per (product_type, publishing_house)
    // across all channels (channel shown as '全部'/"all"), one per explicit channel.
    // create_time is stamped as yesterday's date.
    val resultDS = sparkSession.sql(
      """
         |select
         |	coalesce(channel,'全部') as channel,
         |	product_type,sum(product_type_resources) as product_type_resources,
         |	publishing_house,sum(publishing_house_resources) as publishing_house_resources,date_format(date_add(current_date,-1),'yyyy-MM-dd') as create_time
         |from tmp_info
         |group by channel,product_type,publishing_house
         |grouping sets ((product_type,publishing_house),(channel,product_type,publishing_house))
         |""".stripMargin).as[DwtCategorysDetail]

    // Terminal action: write to the configured ES index.
    EsSparkSQL.saveToEs(resultDS, DefaultPropertiesUtil.get("index.categorys.detail"))

    // Release cached storage once the write has materialized both inputs;
    // otherwise the blocks stay pinned for the lifetime of the application.
    bookBaseInfoDS.unpersist()
    openBookDS.unpersist()
  }

}
