package com.hucais.etl.job.service

import java.util

import cn.hutool.core.date.DateUtil
import com.hucais.core.constant.Constants
import com.hucais.core.utils.CommonUtils
import com.hucais.etl.common.bean.{DDCategory, DwdBookBaseInfo}
import com.hucais.etl.common.dao.{DwdHiveDao, MysqlDao, OdsHiveDao}
import com.hucais.etl.common.service.{CommonQueryService, CommonSelectService, JavaCommonService}
import com.hucais.etl.job.OdsDdData
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{Dataset, SaveMode, SparkSession}

import scala.collection.mutable.ArrayBuffer
import scala.math.BigDecimal.RoundingMode

/**
 * 目前只对当当网数据操作
 */
object DwdBookBaseInfoService {

  // Target path of the Hive snapshot table for cleaned book base info.
  private val HDFS_PATH = "/hucaisdata/published/dwd/book_base_info_snapshoot"

  /**
   * ETL for the incrementally updated Dangdang data: explode multi-category rows,
   * clean them, enrich with Douban scores, and append the result to the Hive snapshot table.
   *
   * @param ssc          SparkContext used to create broadcast variables
   * @param sparkSession active SparkSession
   * @param startDate    start date of the window
   * @param endDate      end date of the window
   */
  def action(ssc: SparkContext, sparkSession: SparkSession, startDate: String, endDate: String): Unit = {
    // Load Dangdang base data from ODS and explode compound-category rows.
    val odsBookDS = OdsHiveDao.getOdsDdData(sparkSession, Constants.SECOND_CHANNEL_DD, startDate, endDate)
    val bookDS = explodeData(sparkSession, odsBookDS)
    bookDS.cache()

    // Broadcast the second-category -> key reference map so executors can classify rows locally.
    val secondCategoryToKeyMap = MysqlDao.getSecondCategoryToKeyMap
    val secondCategoryToKeyMapBd: Broadcast[util.Map[String, DDCategory]] = ssc.broadcast(secondCategoryToKeyMap)
    val cleanBookDS = cleanDdBook(sparkSession, bookDS, secondCategoryToKeyMapBd)
    cleanBookDS.cache()
    cleanBookDS.createOrReplaceTempView(Constants.SPARK_VIEW_BOOK)

    // Load Douban score data for the same window and expose it as a temp view.
    val doubanScoreDS = DwdHiveDao.getDoubanScore(sparkSession, startDate, endDate)
    doubanScoreDS.cache()
    doubanScoreDS.createOrReplaceTempView(Constants.SPARK_VIEW_DOUBAN)

    // Fill in Douban scores on the cleaned book rows (joins the two temp views).
    val resultDS: Dataset[DwdBookBaseInfo] = CommonSelectService.fixDoubanScore(sparkSession)

    // Append the result to the Hive snapshot table as a single output file.
    CommonQueryService.saveAsFileAbsPath(resultDS.coalesce(1).toDF(), HDFS_PATH, Constants.HIVE_SPLIT_STR, SaveMode.Append)
  }

  /**
   * Explodes rows whose `category` field contains several categories
   * (joined with the "-图书" delimiter) into one row per category.
   * Rows missing any of the core descriptive fields are dropped first.
   *
   * @param sparkSession active SparkSession (for Dataset encoders)
   * @param bookDS       raw ODS book rows
   * @return one row per (book, category) pair
   */
  def explodeData(sparkSession: SparkSession, bookDS: Dataset[OdsDdData]): Dataset[OdsDdData] = {
    import sparkSession.implicits._
    bookDS.mapPartitions(partitions => {
      partitions
        .filter(item => CommonUtils.isNotBlankExt(item.isbn) && CommonUtils.isNotBlankExt(item.book_name) &&
          CommonUtils.isNotBlankExt(item.author) && CommonUtils.isNotBlankExt(item.publishing_house) &&
          CommonUtils.isNotBlankExt(item.publishing_time) && CommonUtils.isNotBlankExt(item.category)
        )
    }).flatMap(item => flatMapCategory(item).toIterator)
  }

  /** Copies `item`, replacing only the category field. */
  private def withCategory(item: OdsDdData, category: String): OdsDdData =
    OdsDdData(item.isbn, item.book_name, item.author, category, item.publishing_house, item.publishing_time,
      item.discount, item.brand, item.hot_store_pricing, item.hot_selling_price, item.year_sales,
      item.min_selling_price, item.avg_selling_price, item.store_cnt
    )

  /**
   * Splits the compound category string on the "-图书" delimiter and emits one row per category.
   *
   * BUG FIX: the original computed each per-row `category` value but always emitted
   * `item.category` unchanged, so a compound category produced N identical rows.
   * Each emitted row now carries its own single category.
   *
   * @param item an ODS row whose category may hold several categories
   * @return one copy of `item` per individual category
   */
  private def flatMapCategory(item: OdsDdData): ArrayBuffer[OdsDdData] = {
    val list = new ArrayBuffer[OdsDdData]()
    val arr: Array[String] = item.category.split("-图书")

    if (arr.length == 1) {
      // No delimiter present: keep the original category as-is.
      list.append(withCategory(item, item.category))
    } else {
      for (index <- arr.indices) {
        // The first fragment still starts with "图书"; for the rest the split
        // consumed that prefix, so re-prepend it to restore a full category string.
        val category = if (index == 0) arr(index) else "图书" + arr(index)
        list.append(withCategory(item, category))
      }
    }
    list
  }

  /** True when every field required by the DWD layer is non-blank. Shared by both cleaning pipelines. */
  private def hasRequiredFields(item: OdsDdData): Boolean =
    CommonUtils.isNotBlankExt(item.isbn) && CommonUtils.isNotBlankExt(item.book_name) &&
      CommonUtils.isNotBlankExt(item.author) && CommonUtils.isNotBlankExt(item.publishing_house) &&
      CommonUtils.isNotBlankExt(item.publishing_time) && CommonUtils.isNotBlankExt(item.hot_store_pricing) &&
      CommonUtils.isNotBlankExt(item.hot_selling_price)

  /**
   * Builds a DWD row from a cleaned ODS row. All field cleaning shared by the
   * incremental and historical pipelines lives here; only the category lookup and
   * the discount derivation differ between the two callers.
   *
   * @param item         source ODS row (already filtered for required fields)
   * @param categoryInfo resolved 4-level category for this row
   * @param discountOf   derives the discount from (storePricing, sellingPrice)
   */
  private def buildBaseInfo(item: OdsDdData, categoryInfo: DDCategory,
                            discountOf: (Float, Float) => Float): DwdBookBaseInfo = {
    // Strip spider markup left over in the publisher field.
    val publishingHouse = item.publishing_house.replaceAll("<span class=\"text-value\">", "")
    val bookName = CommonUtils.filterSomeStr(item.book_name)

    // Pricing / count fields: blank values are normalized by the Common helpers.
    val storePricing = CommonUtils.handleBlankFloat(item.hot_store_pricing)
    val sellingPrice = CommonUtils.handleBlankFloat(item.hot_selling_price)
    val minSellingPrice = CommonUtils.handleBlankFloat(item.min_selling_price)
    val avgSellingPrice = CommonUtils.handleBlankFloat(item.avg_selling_price)
    val storeCnt = CommonUtils.handleBlankNumber(item.store_cnt)
    val yearSales = CommonUtils.handleBlankNumber(item.year_sales)

    // Default brand placeholder when the source field is blank.
    val brand = if (CommonUtils.isNotBlankExt(item.brand)) CommonUtils.filterSpiderData(item.brand) else "无"

    // Douban fields are placeholders here; they are filled by a later enrichment step.
    val doubanScore = Constants.NORMAL_FLOAT_VAL
    val graderCnt = Constants.NORMAL_NUMBER_VAL
    val currentTime = DateUtil.format(new java.util.Date(), "yyyy-MM-dd HH:mm:ss")

    DwdBookBaseInfo(
      Constants.FIRST_CHANNEL_REVERSE_SELECT, Constants.SECOND_CHANNEL_DD, item.isbn, bookName,
      categoryInfo.getFirstCategory, categoryInfo.getSecondCategory, categoryInfo.getThirdCategory,
      categoryInfo.getFourthCategory, item.author, publishingHouse, item.publishing_time, sellingPrice,
      minSellingPrice, avgSellingPrice, storeCnt, storePricing, yearSales,
      discountOf(storePricing, sellingPrice), brand, doubanScore, graderCnt, currentTime
    )
  }

  /**
   * Cleans exploded Dangdang book rows into DWD rows, classifying each row via the
   * broadcast second-category map. The discount is taken directly from the source record.
   */
  private def cleanDdBook(sparkSession: SparkSession, bookDS: Dataset[OdsDdData],
                          secondCategoryToKeyMapBd: Broadcast[util.Map[String, DDCategory]]): Dataset[DwdBookBaseInfo] = {
    import sparkSession.implicits._
    bookDS.mapPartitions(partitions => {
      partitions
        .filter(hasRequiredFields)
        // Keep only ISBNs with the genuine-book prefix.
        .filter(item => item.isbn.startsWith(Constants.GENUINE_BOOK_PRE))
        .map(item => {
          val categoryInfo: DDCategory = JavaCommonService.categorizeDdBook(item.category, secondCategoryToKeyMapBd.value)
          // Incremental data carries its own discount field; use it as-is.
          buildBaseInfo(item, categoryInfo, (_, _) => CommonUtils.handleBlankFloat(item.discount))
        })
    })
  }

  /**
   * ETL for the historical Dangdang data: clean it against the historical category
   * reference map and append the result to the Hive snapshot table.
   *
   * @param ssc          SparkContext used to create broadcast variables
   * @param sparkSession active SparkSession
   * @param startDate    start date of the window
   * @param endDate      end date of the window
   */
  def actionOnHistoryData(ssc: SparkContext, sparkSession: SparkSession, startDate: String, endDate: String): Unit = {
    // Broadcast the historical category reference data.
    val categoryMap = MysqlDao.getDDHistoryCategoryMap
    val categoryMapBd: Broadcast[util.Map[String, DDCategory]] = ssc.broadcast(categoryMap)

    // Load and clean the historical Dangdang base data.
    val odsBookDS = OdsHiveDao.getOdsDdData(sparkSession, Constants.SECOND_CHANNEL_DD, startDate, endDate)
    val bookDS = cleanDdHistoryBook(sparkSession, odsBookDS, categoryMapBd)
    bookDS.cache()

    // Append the result to the Hive snapshot table as a single output file.
    CommonQueryService.saveAsFileAbsPath(bookDS.coalesce(1).toDF(), HDFS_PATH, Constants.HIVE_SPLIT_STR, SaveMode.Append)
  }

  /**
   * Derives the discount (on a scale of 10, rounded half-up to 2 decimals) from the
   * store pricing and selling price; falls back to the default when either price is missing.
   */
  private def computeDiscount(storePricing: Float, sellingPrice: Float): Float =
    if (storePricing > 0.0 && sellingPrice > 0.0) {
      val tmpDiscount = (BigDecimal.decimal(sellingPrice) / BigDecimal.decimal(storePricing)) * 10
      tmpDiscount.setScale(2, RoundingMode.HALF_UP).floatValue()
    } else {
      Constants.NORMAL_DISCOUNT_VAL
    }

  /**
   * Cleans historical Dangdang book rows into DWD rows, classifying each row via the
   * broadcast historical category map. Historical data has no discount field, so the
   * discount is derived from the cleaned prices.
   *
   * @param sparkSession  active SparkSession (for Dataset encoders)
   * @param bookDS        historical book rows to clean
   * @param categoryMapBd broadcast historical category reference data
   */
  private def cleanDdHistoryBook(sparkSession: SparkSession, bookDS: Dataset[OdsDdData],
                                 categoryMapBd: Broadcast[util.Map[String, DDCategory]]): Dataset[DwdBookBaseInfo] = {
    import sparkSession.implicits._
    bookDS.mapPartitions(partitions => {
      partitions
        .filter(hasRequiredFields)
        // Keep only ISBNs with the genuine-book prefix.
        .filter(item => item.isbn.startsWith(Constants.GENUINE_BOOK_PRE))
        .map(item => {
          val categoryInfo: DDCategory = JavaCommonService.categorizeDdHistoryBook(item.category, categoryMapBd.value)
          buildBaseInfo(item, categoryInfo, computeDiscount)
        })
    })
  }

}
