package com.hucais.etl.job.service

import java.{lang, util}

import com.hucais.core.constant.Constants
import com.hucais.etl.common.bean.DDCategory
import com.hucais.etl.common.dao.{DwdHiveDao, MysqlDao}
import com.hucais.etl.common.service.{CommonQueryService, CommonSelectService}
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{SaveMode, SparkSession}

object DwsSelectService {

  // Output location in HDFS for the DWS-layer book selection result.
  private val HDFS_PATH = "/hucaisdata/published/dws/book_select_info"

  /**
   * Builds the DWS-layer book selection dataset for the given date window.
   *
   * Pipeline: load lookup maps from MySQL and broadcast them, read DWD-layer
   * book base data (with author popularity) from Hive, compute the total
   * selection score, and append the result as a delimited file under
   * [[HDFS_PATH]].
   *
   * @param ssc          active SparkContext used for broadcasting lookup maps
   * @param sparkSession active SparkSession used for the Hive reads/writes
   * @param startDate    inclusive start of the DWD partition range to read
   * @param endDate      inclusive end of the DWD partition range to read
   */
  def action(ssc: SparkContext, sparkSession: SparkSession, startDate: String, endDate: String): Unit = {
    // Per-category weight lookup, broadcast so every executor shares one copy.
    val categoryWeightMap = MysqlDao.getCategoryWeightMap
    val categoryWeightMapBd: Broadcast[util.Map[String, DDCategory]] = ssc.broadcast(categoryWeightMap)
    // Publishing-house selection lookup.
    val publishingHouseMap = MysqlDao.getPublishingHouseMap
    val publishingHouseMapBd: Broadcast[util.Map[String, lang.Long]] = ssc.broadcast(publishingHouseMap)
    // University / college lookup.
    val universityMap = MysqlDao.getUniversityMap
    val universityMapBd: Broadcast[util.Map[String, util.List[String]]] = ssc.broadcast(universityMap)

    // DWD-layer book base data; coalesce(2) keeps the partition count small
    // for this modest dataset. Cached because the scoring step may traverse
    // it more than once.
    val bookDS = DwdHiveDao.getDwdBookBaseInfoWithAuthorPopularity(sparkSession, startDate, endDate).coalesce(2)
    bookDS.cache()

    try {
      // Compute the total selection score per book.
      val resultDS = CommonSelectService.calSelectInfoIntegral(sparkSession, bookDS, publishingHouseMapBd,
        categoryWeightMapBd, universityMapBd)

      // Append the result to HDFS as a single delimited file (coalesce(1)).
      CommonQueryService.saveAsFileAbsPath(resultDS.coalesce(1).toDF(), HDFS_PATH, Constants.HIVE_SPLIT_STR, SaveMode.Append)
    } finally {
      // Release cached/broadcast state even if the save fails, so repeated
      // runs on a long-lived SparkContext do not leak executor memory.
      bookDS.unpersist()
      categoryWeightMapBd.unpersist()
      publishingHouseMapBd.unpersist()
      universityMapBd.unpersist()
    }
  }

}
