package com.hucais.etl.job.service

import cn.hutool.core.date.DateUtil
import com.hucais.core.constant.Constants
import com.hucais.core.utils.CommonUtils
import com.hucais.etl.common.service.CommonQueryService
import org.apache.spark.SparkContext
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * DWD-layer job: deduplicates douban score records from the ODS layer,
 * cleans the string columns, and appends the result to HDFS for Hive.
 */
object DwdDoubanScoreService {

  /** Target HDFS directory for the DWD douban_score output files. */
  private val HDFS_PATH = "/hucaisdata/published/dwd/douban_score/"

  /**
   * Selects the latest acquisition per (isbn, book) in the half-open window
   * [startDate, endDate), filters out rows with blank key columns, cleans the
   * remaining string fields, and appends the result to [[HDFS_PATH]].
   *
   * @param ssc          Spark context (unused here; kept for interface compatibility)
   * @param sparkSession active session used to run the dedup query
   * @param startDate    inclusive lower bound on acquisition_time
   * @param endDate      exclusive upper bound on acquisition_time
   */
  def action(ssc: SparkContext, sparkSession: SparkSession, startDate: String, endDate: String): Unit = {
    import sparkSession.implicits._
    // NOTE(review): startDate/endDate are interpolated straight into the SQL
    // text. They presumably come from the job scheduler, but validating their
    // format upstream would guard against malformed/injected values — confirm.
    val tmpDF = sparkSession.sql(
      s"""
         |select isbn,query_book,book,author,score,grader_cnt,comments
         |from
         |(
         |	select isbn,query_book,book,author,score,grader_cnt,comments,
         |	row_number() over(partition by isbn,book order by acquisition_time desc) as rn
         |	from published.ods_douban_score
         |	where acquisition_time>='${startDate}' and acquisition_time<'${endDate}'
         |)a where a.rn=1
         |""".stripMargin)

    val resultDS = tmpDF.mapPartitions(partitions => {
      // Hoisted out of the per-row map: the original formatted a fresh Date for
      // every record although the value is loop-invariant. Computing it once per
      // partition avoids that cost and keeps create_time consistent within a
      // partition. Format yields e.g. "2024-01-01T12:00:00+0800" (the single
      // space is swapped for the ISO 'T' separator).
      val createTime = DateUtil.format(new java.util.Date(), "yyyy-MM-dd HH:mm:ssZ").replace(" ", "T")
      partitions
        // Drop rows where any of the four key string columns is blank.
        .filter(item => CommonUtils.isNotBlankExt(item.getString(0)) && CommonUtils.isNotBlankExt(item.getString(1)) &&
          CommonUtils.isNotBlankExt(item.getString(2)) && CommonUtils.isNotBlankExt(item.getString(3)))
        .map(item => {
          val isbn = CommonUtils.filterSomeStr(item.getString(0))
          val query_book = CommonUtils.filterSomeStr(item.getString(1))
          val book = CommonUtils.filterSomeStr(item.getString(2))
          val author = CommonUtils.filterSomeStr(item.getString(3))
          // Numeric columns arrive as strings; helpers handle blank/invalid input.
          val score = CommonUtils.handleBlankFloat(item.getString(4))
          val grader_cnt = CommonUtils.handleBlankNumber(item.getString(5))

          TmpOdsDoubanScore(isbn, query_book, book, author, score, grader_cnt, createTime, item.getString(6))
        })
    })

    // Append the result to Hive's external path. coalesce(1) writes a single
    // file to avoid many small HDFS files, at the cost of a single write task.
    CommonQueryService.saveAsFileAbsPath(resultDS.coalesce(1).toDF(), HDFS_PATH, Constants.HIVE_SPLIT_STR, SaveMode.Append)
  }

  /** Cleaned, deduplicated douban score record written to the DWD layer. */
  final case class TmpOdsDoubanScore(
                                      isbn: String,
                                      query_book: String,
                                      book: String,
                                      author: String,
                                      score: Float,
                                      grader_cnt: Long,
                                      create_time: String, // ISO-8601-like timestamp of this ETL run
                                      comments: String
                                    )


}
