package com.hucais.etl.common.service

import java.util

import cn.hutool.core.date.DateUtil
import com.hucais.core.constant.Constants
import com.hucais.core.utils.CommonUtils
import com.hucais.etl.common.bean._
import com.hucais.etl.job._
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{DataFrame, Dataset, SaveMode, SparkSession}

/**
 * 在线检索-通用服务
 */
object CommonQueryService {

  /**
   * 默认的清洗采集的书籍数据处理
   *
   * @param sparkSession
   * @param bookDS         需要清洗的书籍数据DataSet
   * @param categoryListBd 候选分类名单
   * @return
   */
  /**
   * Default cleaning for crawled book records.
   *
   * Drops rows missing any mandatory field (isbn, book name, author,
   * publisher, publishing time), normalises blank/dirty values via the
   * CommonUtils helpers, resolves the category against the broadcast
   * candidate list, and emits one TmpBookInformation per surviving row.
   *
   * @param sparkSession   active session (supplies the encoder implicits)
   * @param bookDS         raw crawled book dataset to clean
   * @param categoryListBd broadcast list of candidate categories
   * @return cleaned temporary book dataset
   */
  def defultCleanBookData(sparkSession: SparkSession, bookDS: Dataset[OdsBookInformation],
                          categoryListBd: Broadcast[util.List[CandidateCategory]]): Dataset[TmpBookInformation] = {
    import sparkSession.implicits._
    bookDS.mapPartitions { rows =>
      rows
        .filter { row =>
          // Every mandatory field must be present, otherwise the row is unusable.
          CommonUtils.isNotBlankExt(row.isbn) &&
            CommonUtils.isNotBlankExt(row.book_name) &&
            CommonUtils.isNotBlankExt(row.author) &&
            CommonUtils.isNotBlankExt(row.publishing_house) &&
            CommonUtils.isNotBlankExt(row.publishing_time)
        }
        .map { row =>
          // Strip crawler markup from the publisher and noise from the title.
          val cleanPublisher = row.publishing_house.replaceAll("<span class=\"text-value\">", "")
          val cleanBookName = CommonUtils.filterSomeStr(row.book_name)

          // Map the raw category string onto one of the broadcast candidates.
          val resolvedCategory =
            JavaCommonService.categorizeBookData(CommonUtils.handleBlankStr(row.category), categoryListBd.value)

          // Page count: strip the "页" suffix and spaces; fall back to "未知".
          val pageCount =
            if (CommonUtils.isNotBlankExt(row.number_of_pages))
              CommonUtils.handleBlankNumber(row.number_of_pages.replaceAll("页", "").replaceAll(" ", "")).toString
            else "未知"

          TmpBookInformation(
            row.isbn,
            cleanBookName,
            row.author,
            CommonUtils.handleBlankFloat(row.pricing),
            resolvedCategory,
            cleanPublisher,
            row.publishing_time,
            CommonUtils.handleBlankStr(row.impression),
            CommonUtils.handleBlankStr(row.edition),
            "未知", // description is not crawled; placeholder value
            CommonUtils.handleBlankStr(row.sales_volume),
            CommonUtils.handleBlankStr(row.binding_layout),
            CommonUtils.handleBlankStr(row.format),
            pageCount,
            CommonUtils.handleBlankStr(row.paper),
            CommonUtils.handleBlankStr(row.number_of_suit))
        }
    }
  }

  /**
   * 默认的清洗采集的书店数据处理
   *
   * @param sparkSession
   * @param storeDS 需要清洗的书店数据DataSet
   * @return
   */
  /**
   * Default cleaning for crawled book-store records.
   *
   * Keeps only rows that carry isbn, shop name, list price and selling
   * price, then normalises each field through the CommonUtils helpers and
   * emits one TmpBookStore per surviving row.
   *
   * @param sparkSession active session (supplies the encoder implicits)
   * @param storeDS      raw crawled store dataset to clean
   * @return cleaned temporary store dataset
   */
  def defultCleanStoreData(sparkSession: SparkSession, storeDS: Dataset[OdsBookStore]): Dataset[TmpBookStore] = {
    import sparkSession.implicits._
    storeDS.mapPartitions { rows =>
      rows
        .filter { row =>
          // A listing without these four fields cannot be priced or joined.
          CommonUtils.isNotBlankExt(row.isbn) &&
            CommonUtils.isNotBlankExt(row.name_of_shop) &&
            CommonUtils.isNotBlankExt(row.store_pricing) &&
            CommonUtils.isNotBlankExt(row.selling_price)
        }
        .map { row =>
          // Field order follows the TmpBookStore constructor:
          // isbn, store name, slogan, comments, selling price, list price,
          // conditions, stock count.
          TmpBookStore(
            CommonUtils.handleBlankStr(row.isbn),
            CommonUtils.handleBlankStr(row.name_of_shop),
            CommonUtils.handleBlankStr(row.slogan),
            CommonUtils.handleBlankNumber(row.book_review),
            CommonUtils.handleBlankFloat(row.selling_price),
            CommonUtils.handleBlankFloat(CommonUtils.filterSomeStr(row.store_pricing)),
            CommonUtils.handleBlankStr(row.conditions),
            CommonUtils.handleBlankNumber(row.number))
        }
    }
  }

  /**
   * Aggregates the cleaned data into one denormalised row per (isbn, store) listing.
   *
   * NOTE(review): assumes the temp views `store`, `book` and `comment` have
   * already been registered in this session by the calling job — confirm
   * against the callers.
   *
   * Join structure (inside-out):
   *  - store LEFT JOIN book on isbn: one row per store listing enriched with
   *    book metadata (book columns may be null for unknown isbns);
   *  - LEFT JOIN per-(isbn, store_name) review counts derived from
   *    book INNER JOIN comment on book_name, defaulted to 0 via coalesce;
   *  - LEFT JOIN per-isbn count of stores carrying the book, defaulted to 0.
   *
   * @param sparkSession active session (supplies the TmpQueryInfo encoder)
   * @return one TmpQueryInfo per store listing
   */
  def defaultAggAllData(sparkSession: SparkSession): Dataset[TmpQueryInfo] = {
    import sparkSession.implicits._
    sparkSession.sql(
      s"""
         |select
         |	a.isbn,store_name,selling_price,store_pricing,coalesce(book_comments,0) as book_comments,coalesce(b.store_num,0) as store_num,
         |	slogan,store_comments,conditions,book_num,a.book_name,author,pricing,category,publishing_house,
         |	publishing_time,impression,edition,description,sales,binding_layout,format,pages,paper,suits
         |from
         |(
         |	select
         |		base.isbn,base.store_name,selling_price,store_pricing,book_comments,slogan,store_comments,
         |		conditions,book_num,book_name,author,pricing,category,publishing_house,publishing_time,impression,
         |		edition,description,sales,binding_layout,format,pages,paper,suits
         |	from
         |	(
         |		select b.isbn,b.book_name,b.author,b.pricing,b.category,b.publishing_house,b.publishing_time,b.impression,
         |		b.edition,b.description,b.sales,b.binding_layout,b.format,b.pages,b.paper,b.suits,
         |		a.store_name,a.slogan,a.store_comments,a.store_pricing,a.selling_price,
         |		a.conditions,a.book_num
         |		from store a left join book b on a.isbn =b.isbn
         |	)base
         |	left join
         |	(
         |		select a.isbn,b.store_name,count(*) AS book_comments
         |		FROM book a INNER JOIN comment b ON a.book_name = b.book_name
         |		GROUP BY a.isbn,b.store_name
         |	)comment_info on base.isbn = comment_info.isbn and base.store_name=comment_info.store_name
         |) a
         |left join
         |(
         |	select a.isbn as isbn,a.book_name,count(*) as store_num
         |	from book a left join store b on a.isbn =b.isbn
         |	group by a.isbn,a.book_name
         |)b on a.isbn = b.isbn
         |""".stripMargin
    ).as[TmpQueryInfo]
  }


  /**
   * 初始化全文检索的Result数据
   *
   * @param sparkSession
   * @param tmpBookBaseInfoDS 聚合后的临时书籍数据
   * @param firstChannel      一级渠道
   * @param secondChannel     二级渠道
   * @return
   */
  /**
   * Initialises the full-text-search result rows.
   *
   * For each aggregated record this buckets the review count, premium rate,
   * publishing age and number of selling stores into labelled ranges with
   * scores, sums the four scores into a total, and stamps the channel and
   * creation-time metadata.
   *
   * @param sparkSession      active session (supplies the encoder implicits)
   * @param tmpBookBaseInfoDS aggregated temporary book data
   * @param firstChannel      first-level channel tag
   * @param secondChannel     second-level channel tag
   * @return dataset of DwsBookQueryInfo rows ready for the serving layer
   */
  def initResultData(sparkSession: SparkSession, tmpBookBaseInfoDS: Dataset[TmpQueryInfo],
                     firstChannel: String, secondChannel: String): Dataset[DwsBookQueryInfo] = {
    import sparkSession.implicits._
    tmpBookBaseInfoDS.mapPartitions(partitions => {
      partitions.map(item => {
        val sales = CommonUtils.handleBlankNumber(item.sales)
        val pages = CommonUtils.handleBlankNumber(item.pages)

        val selling_price = item.selling_price
        val store_comments = CommonUtils.handleBlankNumber(item.store_comments)
        // Review-count bucket and its score
        val commentsTuple = CommonQueryService.calCommentsRangeAndIntegral(item.book_comments)
        val commentsRange = commentsTuple._1
        val commentsIntegral = commentsTuple._2

        // Premium-rate bucket (selling price vs list price) and its score
        val premiumTuple = CommonQueryService.calPremiumRangeAndIntegral(BigDecimal(selling_price), BigDecimal(item.store_pricing))
        val premiumRange = premiumTuple._1
        val premiumIntegral = premiumTuple._2

        // Publishing-age bucket (years since publication) and its score
        val publishedTuple = CommonQueryService.calPublishedYearRangeAndIntegral(item.publishing_time)
        val publishedYearRange = publishedTuple._1
        val publishedYearIntegral = publishedTuple._2

        // Selling-store-count bucket and its score
        val sellingStoresTuple = CommonQueryService.calSellingStoresRangeAndIntegral(item.store_num)
        val sellingStoresRange = sellingStoresTuple._1
        val sellingStoresIntegral = sellingStoresTuple._2

        // Overall relevance score: plain sum of the four partial scores
        val totalIntegral = commentsIntegral + premiumIntegral + publishedYearIntegral + sellingStoresIntegral
        // ISO-8601-like timestamp, e.g. "2024-01-01T12:00:00+0800"
        val currentTime = DateUtil.format(new java.util.Date(), "yyyy-MM-dd HH:mm:ssZ").replace(" ", "T")

        // NOTE(review): the null arguments are fields this ETL path does not
        // produce — presumably filled by other pipelines; confirm the
        // DwsBookQueryInfo positional mapping before touching the order.
        DwsBookQueryInfo(
          firstChannel, secondChannel, item.store_name, store_comments, item.isbn, item.book_name, item.category, item.slogan, item.description, null,
          null, item.book_comments, item.store_pricing, selling_price, item.publishing_house, item.publishing_time, null, null, item.edition, item.impression,
          null, sales, item.author, null, item.format, null, item.suits, item.binding_layout, pages, item.paper,
          item.store_num, publishedYearRange, publishedYearIntegral, commentsRange, commentsIntegral, premiumRange, premiumIntegral, sellingStoresRange, sellingStoresIntegral, totalIntegral,
          Constants.CREATE_TYPE_ETL, currentTime, currentTime
        )
      })
    })
  }


  /**
   * 将 DataFrame 保存为 hdfs 文件 同时指定保存绝对路径 与 分隔符
   *
   * @param dataFrame  需要保存的 DataFrame
   * @param absSaveDir 保存保存的路径 （绝对路径）
   * @param splitRex   指定分割分隔符
   * @param saveMode   保存的模式：Append、Overwrite、ErrorIfExists、Ignore
   */
  /**
   * Saves a DataFrame as uncompressed delimited text under an absolute HDFS path.
   *
   * Every column is concatenated into a single string column with the given
   * delimiter, then written out with the requested save mode.
   *
   * NOTE(review): "mapred.output.compress" is the legacy Hadoop key (modern:
   * "mapreduce.output.fileoutputformat.compress") — confirm the cluster still
   * honours the legacy alias before changing it. Also, column names containing
   * SQL-special characters would need backtick-quoting inside selectExpr.
   *
   * @param dataFrame  DataFrame to save
   * @param absSaveDir absolute output directory
   * @param splitRex   field delimiter
   * @param saveMode   save mode: Append / Overwrite / ErrorIfExists / Ignore
   */
  def saveAsFileAbsPath(dataFrame: DataFrame, absSaveDir: String, splitRex: String, saveMode: SaveMode): Unit = {
    // Keep the text output uncompressed so it stays directly readable.
    dataFrame.sqlContext.sparkContext.hadoopConfiguration.set("mapred.output.compress", "false")
    // Collapse all columns into one delimited string column named "allclumn".
    val columnCsv = dataFrame.columns.mkString(",")
    val singleColumn = dataFrame.selectExpr(s"concat_ws('$splitRex',$columnCsv) as allclumn")
    singleColumn.write.mode(saveMode).text(absSaveDir)
  }

  /**
   * 计算书评论数的范围和相应的积分
   *
   * @param bookCommentNum 书评论数
   * @return
   */
  /**
   * Computes the review-count bucket and its score for a book.
   *
   * Buckets: fewer than 10 reviews (15 pts), 10-20 inclusive (20 pts),
   * more than 20 (25 pts).
   *
   * @param bookCommentNum number of reviews for the book
   * @return (bucket label, score)
   */
  def calCommentsRangeAndIntegral(bookCommentNum: Long): (String, Long) = {
    // Fix: the original `10 until 20` excluded 20, so a book with exactly
    // 20 reviews fell through to the "未知"/0 default even though the
    // middle bucket's label reads "10-20个". The three guards below are
    // exhaustive, so no default case is needed.
    bookCommentNum match {
      case n if n < 10  => ("10个以下", 15L)
      case n if n <= 20 => ("10-20个", 20L)
      case _            => ("20个以上", 25L)
    }
  }

  /**
   * 计算溢价率区间和相应的积分
   *
   * @param sellingPrince 售价
   * @param storePricing  定价
   * @return
   */
  /**
   * Computes the premium-rate bucket and its score.
   *
   * Premium rate = (selling price - list price) / list price. When the list
   * price is 0 but something is being charged, the premium is treated as
   * 100%; when both prices are 0 (or the list price is invalid) it is 0.
   * Buckets: below 30% (20 pts), 30%-50% inclusive (25 pts), above 50% (30 pts).
   *
   * @param sellingPrince selling price
   * @param storePricing  list (cover) price
   * @return (bucket label, score)
   */
  def calPremiumRangeAndIntegral(sellingPrince: BigDecimal, storePricing: BigDecimal): (String, Long) = {
    // Derive the premium rate as a single expression instead of mutating a var.
    val premium: BigDecimal =
      if (storePricing > 0) (sellingPrince - storePricing) / storePricing
      else if (sellingPrince > 0 && storePricing == 0.00) BigDecimal(1.0)
      else BigDecimal(0.00)

    // The three branches cover the whole range, so no "unknown" fallback can fire.
    if (premium < 0.3) ("30%以下", 20L)
    else if (premium <= 0.5) ("30%-50%", 25L)
    else ("50%以上", 30L)
  }

  /**
   * 计算出版年限区间和相应的积分
   *
   * @param publishingTime 出版时间
   * @return
   */
  /**
   * Computes the publishing-age bucket and its score.
   *
   * Buckets by years since publication: under 2 years (10 pts), 2-5 years
   * inclusive (15 pts), over 5 years (20 pts).
   *
   * @param publishingTime publication date string
   * @return (bucket label, score)
   */
  def calPublishedYearRangeAndIntegral(publishingTime: String): (String, Long) = {
    // Years between the publication date and today (project helper).
    // NOTE(review): behavior for unparseable dates is defined by
    // CommonUtils.getBetweenYearWithToday — confirm it cannot throw here.
    val yearDiff = CommonUtils.getBetweenYearWithToday(publishingTime)
    // Fix: the original `2 until 5` excluded 5, so a book exactly 5 years
    // old fell through to the "未知"/0 default even though the middle
    // bucket's label reads "2-5年". The guards below are exhaustive.
    yearDiff match {
      case d if d < 2  => ("2年以下", 10L)
      case d if d <= 5 => ("2-5年", 15L)
      case _           => ("5年以上", 20L)
    }
  }

  /**
   * 计算在售商家数量区间和相应的积分
   *
   * @param storeNum 在售商家数量
   * @return
   */
  /**
   * Computes the selling-store-count bucket and its score.
   *
   * Buckets: fewer than 3 stores (15 pts), 3-5 inclusive (20 pts),
   * more than 5 (25 pts).
   *
   * @param storeNum number of stores currently selling the book
   * @return (bucket label, score)
   */
  def calSellingStoresRangeAndIntegral(storeNum: Long): (String, Long) = {
    // Fix: the original `3 until 5` excluded 5, so exactly 5 selling stores
    // fell through to the "未知"/0 default even though the middle bucket's
    // label reads "3-5个". The three guards below are exhaustive, so no
    // default case is needed.
    storeNum match {
      case n if n < 3  => ("3个以下", 15L)
      case n if n <= 5 => ("3-5个", 20L)
      case _           => ("5个以上", 25L)
    }
  }


}
