package com.hucais.etl.history.service

import java.util

import cn.hutool.core.date.DateUtil
import com.hucais.core.constant.Constants
import com.hucais.core.utils.{CommonUtils, DefaultPropertiesUtil}
import com.hucais.etl.common.bean.{CandidateCategory, DwsBookQueryInfo, TmpQueryInfo}
import com.hucais.etl.common.dao.MysqlDao
import com.hucais.etl.common.service.{CommonQueryService, JavaCommonService}
import com.hucais.etl.history.bean._
import com.hucais.etl.history.dao.HiveDao
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{Dataset, SparkSession}
import org.elasticsearch.spark.sql.EsSparkSQL

object HistoryKfzService {

  /**
   * Entry point of the kfz history ETL job.
   *
   * Pipeline: load raw store / book / comment datasets from Hive, clean them,
   * register them as temp views, join them into one flat record per
   * (isbn, store) via SQL, score each record, and write the result to the
   * Elasticsearch index configured under "dws.book.query.info".
   *
   * @param ssc          SparkContext, used to broadcast the category list
   * @param sparkSession SparkSession, used for Dataset and SQL operations
   */
  def action(ssc: SparkContext, sparkSession: SparkSession): Unit = {
    // Fetch the candidate-category list from MySQL and broadcast it so every
    // executor can categorise books without re-querying the database.
    val categoryList = MysqlDao.getCandidateCategoryList
    val categoryListBd: Broadcast[util.List[CandidateCategory]] = ssc.broadcast(categoryList)

    // Load the raw datasets from Hive; each is cached because it feeds
    // several downstream joins in aggAllData.
    val storeDS: Dataset[HistoryKfzBookStore] = HiveDao.getKfzBookStore(sparkSession)
    storeDS.cache()
    val bookInformationDS: Dataset[HistoryKfzBookInformation] = HiveDao.getKfzBookInformation(sparkSession)
    bookInformationDS.cache()
    val commentDS: Dataset[HistoryKfzBookComment] = HiveDao.getBookKfzComment(sparkSession)
    commentDS.cache()
    commentDS.createOrReplaceTempView("comments")

    // Clean the store records and expose them to Spark SQL as "store".
    // (Fixed typo: was "tmpStroeDS".)
    val tmpStoreDS = cleanStoreESData(sparkSession, storeDS)
    tmpStoreDS.cache()
    tmpStoreDS.createOrReplaceTempView("store")

    // Clean the book records (categorised via the broadcast list) and expose
    // them as "book".
    val tmpBookDS = cleanBookESData(sparkSession, bookInformationDS, categoryListBd)
    tmpBookDS.cache()
    tmpBookDS.createOrReplaceTempView("book")

    // Aggregate the full dataset: one flat row per (isbn, store) combination.
    val tmpBookBaseInfoDS = aggAllData(sparkSession)
    tmpBookBaseInfoDS.cache()

    // Assemble the final result rows and write them to the configured ES index.
    val resultDS = initResultData(sparkSession, tmpBookBaseInfoDS)
    EsSparkSQL.saveToEs(resultDS, DefaultPropertiesUtil.get("dws.book.query.info"))

    // saveToEs is an action, so all cached datasets have been materialised and
    // consumed by this point; release executor storage explicitly instead of
    // waiting for LRU eviction, and drop the broadcast copy as well.
    tmpBookBaseInfoDS.unpersist()
    tmpBookDS.unpersist()
    tmpStoreDS.unpersist()
    commentDS.unpersist()
    bookInformationDS.unpersist()
    storeDS.unpersist()
    categoryListBd.unpersist()
  }

  /**
   * Builds the final result rows to be written to Elasticsearch.
   *
   * Each aggregated base record is normalised (blank numeric fields replaced
   * with defaults) and scored along four dimensions — comment count, premium
   * rate, publication age, and number of selling stores — whose integrals are
   * summed into a total score.
   *
   * @param sparkSession      active session, needed for the Dataset encoder implicits
   * @param tmpBookBaseInfoDS aggregated book base-info records (output of aggAllData)
   * @return Dataset of DwsBookQueryInfo rows, ready for EsSparkSQL.saveToEs
   */
  private def initResultData(sparkSession: SparkSession, tmpBookBaseInfoDS: Dataset[TmpQueryInfo]) = {
    import sparkSession.implicits._
    tmpBookBaseInfoDS.mapPartitions(partitions => {
      partitions.map(item => {
        // Normalise possibly-blank numeric fields.
        val sales = CommonUtils.handleBlankNumber(item.sales)
        val pages = CommonUtils.handleBlankNumber(item.pages)

        val selling_price = item.selling_price
        val store_comments = CommonUtils.handleBlankNumber(item.store_comments)
        // Comment count: compute its range bucket and score (integral).
        val commentsTuple = CommonQueryService.calCommentsRangeAndIntegral(item.book_comments)
        val commentsRange = commentsTuple._1
        val commentsIntegral = commentsTuple._2

        // Premium rate (selling price vs. store pricing): range bucket and score.
        val premiumTuple = CommonQueryService.calPremiumRangeAndIntegral(BigDecimal(selling_price), BigDecimal(item.store_pricing))
        val premiumRange = premiumTuple._1
        val premiumIntegral = premiumTuple._2

        // Publication age: range bucket and score.
        val publishedTuple = CommonQueryService.calPublishedYearRangeAndIntegral(item.publishing_time)
        val publishedYearRange = publishedTuple._1
        val publishedYearIntegral = publishedTuple._2

        // Number of stores selling this book: range bucket and score.
        val sellingStoresTuple = CommonQueryService.calSellingStoresRangeAndIntegral(item.store_num)
        val sellingStoresRange=sellingStoresTuple._1
        val sellingStoresIntegral=sellingStoresTuple._2

        // Total score is the plain sum of the four dimension scores.
        val totalIntegral = commentsIntegral + premiumIntegral + publishedYearIntegral + sellingStoresIntegral
        // ES-friendly timestamp: "yyyy-MM-dd HH:mm:ssZ" with the space turned
        // into 'T', yielding an ISO-8601-style value.
        val currentTime = DateUtil.format(new java.util.Date(), "yyyy-MM-dd HH:mm:ssZ").replace(" ", "T")

        // NOTE(review): DwsBookQueryInfo is filled positionally (40+ fields);
        // keep this argument order in sync with the case-class declaration.
        DwsBookQueryInfo(
          "逆向选品", Constants.SECOND_CHANNEL_KFZ, item.store_name, store_comments, item.isbn, item.book_name, item.category, item.slogan, item.description, null,
          null, item.book_comments, item.store_pricing, selling_price, item.publishing_house, item.publishing_time, null, null, item.edition, item.impression,
          null, sales, item.author, null, item.format, null, item.suits, item.binding_layout, pages, item.paper,
          item.store_num, publishedYearRange, publishedYearIntegral, commentsRange, commentsIntegral, premiumRange, premiumIntegral, sellingStoresRange, sellingStoresIntegral, totalIntegral,
          Constants.CREATE_TYPE_ETL, currentTime, currentTime
        )
      })
    })
  }

  /**
   * Joins the "store", "book" and "comments" temp views into one flat record
   * per (isbn, store_name, price) combination.
   *
   * Query structure, innermost first:
   *  1. for every (isbn, store_pricing), keep the store rows carrying the MAX
   *     and the MIN selling price (UNION ALL of two self-joins on "store");
   *  2. inner-join those rows to "book" and deduplicate to one row per
   *     (isbn, store_pricing, selling_price) via row_number() ... where rn = 1;
   *  3. left-join per-(isbn, store_name) comment counts, defaulting to 0 when
   *     a book/store pair has no comments (coalesce);
   *  4. attach store_num, the row count per isbn from book left-joined to store.
   *
   * @param sparkSession active session, used for sql() and the TmpQueryInfo encoder
   * @return aggregated base records as Dataset[TmpQueryInfo]
   */
  private def aggAllData(sparkSession: SparkSession): Dataset[TmpQueryInfo] = {
    import sparkSession.implicits._
    sparkSession.sql(
      s"""
         |select
         |	a.isbn,store_name,selling_price,store_pricing,book_comments,b.store_num as store_num,
         |	slogan,store_comments,conditions,book_num,book_name,author,pricing,category,publishing_house,
         |	publishing_time,impression,edition,description,sales,binding_layout,format,pages,paper,suits
         |from
         |(
         |	select
         |		base.isbn,base.store_name,selling_price,store_pricing,coalesce(comment_info.book_comments,0) as book_comments,slogan,store_comments,
         |		conditions,book_num,book_name,author,pricing,category,publishing_house,publishing_time,impression,
         |		edition,description,sales,binding_layout,format,pages,paper,suits
         |	from
         |	(
         |		select
         |			isbn,book_name,author,pricing,category,publishing_house,publishing_time,impression,
         |			edition,description,sales,binding_layout,format,pages,paper,suits,
         |			store_name,slogan,store_comments,store_pricing,selling_price,conditions,book_num
         |		from
         |		(
         |			select
         |				b.isbn,b.book_name,b.author,b.pricing,b.category,b.publishing_house,b.publishing_time,b.impression,
         |				b.edition,b.description,b.sales,b.binding_layout,b.format,b.pages,b.paper,b.suits,
         |				a.store_name,a.slogan,a.store_comments,a.store_pricing,a.selling_price,
         |				a.conditions,a.book_num,row_number() over(partition by a.isbn,a.store_pricing,a.selling_price order by a.selling_price) as rn
         |			from
         |			(
         |				select a.isbn,a.store_name,a.slogan,a.store_comments,a.conditions,a.book_num,a.store_pricing,b.selling_price
         |				FROM store a
         |				inner JOIN (
         |					SELECT isbn, store_pricing, max(selling_price) AS selling_price
         |					FROM store GROUP BY isbn,store_pricing
         |				) b ON a.isbn = b.isbn AND a.store_pricing = b.store_pricing AND a.selling_price = b.selling_price
         |
         |				UNION ALL
         |
         |				select a.isbn,a.store_name,a.slogan,a.store_comments,a.conditions,a.book_num,a.store_pricing,b.selling_price
         |				FROM store a
         |				inner JOIN (
         |					SELECT isbn, store_pricing, min(selling_price) AS selling_price
         |					FROM store GROUP BY isbn,store_pricing
         |				) b ON a.isbn = b.isbn AND a.store_pricing = b.store_pricing AND a.selling_price = b.selling_price
         |			)a inner join book b on a.isbn = b.isbn
         |		)t where t.rn=1
         |	)base
         |	left join
         |	(
         |		SELECT a.isbn,b.store_name,count(*) AS book_comments
         |		FROM book a INNER JOIN comments b ON a.book_name = b.book_name
         |		GROUP BY a.isbn,b.store_name
         |	)comment_info ON base.isbn = comment_info.isbn AND base.store_name = comment_info.store_name
         |)a,
         |(
         |	select a.isbn as isbn,a.book_name as bookname,count(*) as store_num
         |	from book as a left join store as b on a.isbn = b.isbn
         |	group by a.isbn,a.book_name
         |)b
         |where a.isbn = b.isbn
         |""".stripMargin).as[TmpQueryInfo]
  }

  /**
   * Cleans the raw kfz store records.
   *
   * Rows missing any of isbn, shop name, store pricing or selling price are
   * dropped; the remaining fields are normalised (blank strings / numbers
   * replaced with defaults) and mapped onto TmpKfzBookStore.
   *
   * @param sparkSession active session, needed for the Dataset encoder implicits
   * @param storeDS      raw store index data
   * @return cleaned store records
   */
  private def cleanStoreESData(sparkSession: SparkSession, storeDS: Dataset[HistoryKfzBookStore]): Dataset[TmpKfzBookStore] = {
    import sparkSession.implicits._
    storeDS.mapPartitions(partitions => {
      partitions
        .filter(item => CommonUtils.isNotBlankExt(item.isbn) && CommonUtils.isNotBlankExt(item.name_of_shop) &&
                        CommonUtils.isNotBlankExt(item.store_pricing) && CommonUtils.isNotBlankExt(item.selling_price))
        .map(item => {
          val isbn = CommonUtils.handleBlankStr(item.isbn)
          val storeName = CommonUtils.handleBlankStr(item.name_of_shop)
          val bookNum = CommonUtils.handleBlankNumber(item.number)
          // store_pricing carries extra characters from the crawler; strip them
          // before the float conversion.
          val storePricing = CommonUtils.handleBlankFloat(CommonUtils.filterSomeStr(item.store_pricing))
          // Fixed typo: local was previously named "sellingPrince".
          val sellingPrice = CommonUtils.handleBlankFloat(item.selling_price)
          val slogan = CommonUtils.handleBlankStr(item.slogan)
          val bookComments = CommonUtils.handleBlankNumber(item.book_review)
          val conditions = CommonUtils.handleBlankStr(item.conditions)

          TmpKfzBookStore(isbn, storeName, slogan, bookComments, sellingPrice, storePricing, conditions, bookNum)
        })
    })
  }

  /**
   * Cleans the raw kfz book records.
   *
   * Rows missing any of isbn, book name, author, publishing house or
   * publishing time are dropped; the remaining fields are normalised, the
   * category is resolved against the broadcast candidate-category list, and
   * the result is mapped onto TmpKfzBookInformation.
   *
   * @param sparkSession      active session, needed for the Dataset encoder implicits
   * @param bookInformationDS raw book index data
   * @param categoryListBd    broadcast candidate-category list used for categorisation
   * @return cleaned book records
   */
  private def cleanBookESData(sparkSession: SparkSession, bookInformationDS: Dataset[HistoryKfzBookInformation],
                              categoryListBd: Broadcast[util.List[CandidateCategory]]): Dataset[TmpKfzBookInformation] = {
    import sparkSession.implicits._
    bookInformationDS.mapPartitions(partitions => {
      partitions
        .filter(item => CommonUtils.isNotBlankExt(item.isbn) && CommonUtils.isNotBlankExt(item.book_name) &&
                        CommonUtils.isNotBlankExt(item.author) && CommonUtils.isNotBlankExt(item.publishing_house) &&
                        CommonUtils.isNotBlankExt(item.publishing_time))
        .map(item => {
          val pricing = CommonUtils.handleBlankFloat(item.pricing)
          val impression = CommonUtils.handleBlankStr(item.impression)
          val edition = CommonUtils.handleBlankStr(item.edition)
          val binding_layout = CommonUtils.handleBlankStr(item.binding_layout)
          val format = CommonUtils.handleBlankStr(item.format)
          val paper = CommonUtils.handleBlankStr(item.paper)
          val suits = CommonUtils.handleBlankStr(item.number_of_suit)

          // Strip the HTML wrapper the crawler leaves around publisher names.
          val publishingHouse = item.publishing_house.replaceAll("<span class=\"text-value\">", "")
          val bookName = CommonUtils.filterSomeStr(item.book_name)

          val categoryTmp = CommonUtils.handleBlankStr(item.category)
          val category = JavaCommonService.categorizeBookData(categoryTmp, categoryListBd.value)

          // Page count: strip the "页" (pages) suffix and any spaces before the
          // numeric conversion; fall back to "未知" (unknown) when blank.
          val pages =
            if (CommonUtils.isNotBlankExt(item.number_of_pages))
              CommonUtils.handleBlankNumber(item.number_of_pages.replaceAll("页", "").replaceAll(" ", "")).toString
            else
              "未知"

          // Book-description extraction is intentionally disabled (previously a
          // filterSomeStr on item.book_description); always emit "未知" (unknown).
          val bookDescription = "未知"

          TmpKfzBookInformation(
            item.isbn, bookName, item.author, pricing, category, publishingHouse, item.publishing_time,
            impression, edition, bookDescription, item.sales_volume, binding_layout, format, pages, paper, suits)
        })
    })
  }


}
