package com.hucais.etl.job.service

import cn.hutool.core.date.DateUtil
import cn.hutool.core.lang.Dict
import cn.hutool.json.JSONUtil
import com.hucais.core.constant.Constants
import com.hucais.core.utils.CommonUtils
import com.hucais.etl.common.service.CommonQueryService
import com.hucais.etl.job.{DwdRanking, OdsRanking}
import org.apache.spark.SparkContext
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

object DwdRankingService extends Logging {

  /** Target HDFS directory for the published DWD ranking output. */
  private val HDFS_PATH = "/hucaisdata/published/dwd/ranking/"

  /**
   * Builds the DWD ranking table: reads the most recently acquired snapshot per
   * (channel, year, month, ranking_name) from `published.ods_ranking`, expands the
   * JSON-array `detail` column into one row per ranked book, and appends the
   * result to the DWD layer on HDFS.
   *
   * @param ssc          Spark context (kept for caller compatibility; not used here)
   * @param sparkSession active session used for SQL and Dataset transformations
   */
  def action(ssc: SparkContext, sparkSession: SparkSession): Unit = {
    import sparkSession.implicits._
    // Deduplicate: keep only the latest acquisition per ranking key.
    val tmpDS = sparkSession.sql(
      s"""
         |select channel,year,month,ranking_name,detail
         |from
         |(
         |	select channel,year,month,ranking_name,detail,
         |	row_number() over(partition by channel,year,month,ranking_name order by acquisition_time desc) as rn
         |	from published.ods_ranking
         |  where detail is not null and detail !=''
         |)a where a.rn=1
         |""".stripMargin).as[OdsRanking]

    val resultDS = tmpDS.mapPartitions(partitions => {
      partitions
        // Drop rows whose detail payload is not parseable JSON at all.
        .filter(item => JSONUtil.isJson(item.detail))
        // Every partition-key column must be non-blank to produce a DWD row.
        .filter(item =>
          CommonUtils.isNotBlankExt(item.channel) && CommonUtils.isNotBlankExt(item.year) &&
            CommonUtils.isNotBlankExt(item.month) && CommonUtils.isNotBlankExt(item.ranking_name)
        )
        // One ODS row fans out into zero or more DWD rows.
        .flatMap(item => handleJsonDetail(item).toIterator)
    })

    // Append the result to the Hive/HDFS DWD layer as a single output file.
    CommonQueryService.saveAsFileAbsPath(resultDS.coalesce(1).toDF(), HDFS_PATH, Constants.HIVE_SPLIT_STR, SaveMode.Append)
  }

  /**
   * Parses the JSON-array `detail` column of one deduplicated ODS row into
   * [[DwdRanking]] rows.
   *
   * Each array element is converted independently: an element that fails to
   * convert (e.g. a missing/null "ranking" key, which makes `getInt` return
   * null and `longValue()` throw NPE) is logged and skipped, so one bad element
   * no longer discards the remaining elements of the same array. A failure to
   * parse the array itself is logged and yields an empty result.
   *
   * @param item ODS row whose `detail` holds a JSON array of book entries
   * @return one DwdRanking per successfully converted element (possibly empty)
   */
  private def handleJsonDetail(item: OdsRanking): ArrayBuffer[DwdRanking] = {
    val resArr = new ArrayBuffer[DwdRanking]()
    // Loop-invariant processing timestamp, hoisted out of the element loop;
    // ISO-8601 "T" separator produced the same way as before.
    val currentTime = DateUtil.format(new java.util.Date(), "yyyy-MM-dd HH:mm:ssZ").replace(" ", "T")

    try {
      val jSONArray = JSONUtil.parseArray(item.detail)
      val list = JSONUtil.toList(jSONArray, classOf[Dict])
      val iterator = list.iterator()
      while (iterator.hasNext) {
        val dict = iterator.next()
        try {
          // getInt returns a boxed (nullable) Integer; longValue() NPEs when
          // "ranking" is absent — caught below so only this element is skipped.
          val ranking = dict.getInt("ranking").longValue()
          resArr.append(
            DwdRanking(item.channel, item.year, item.month, item.ranking_name, ranking,
              dict.getStr("book_name"), dict.getStr("isbn"), dict.getStr("author"),
              dict.getStr("publishing_house"), dict.getStr("brand"),
              dict.getStr("publishing_time"), dict.getStr("selling_price"),
              dict.getStr("store_pricing"), dict.getStr("score"),
              dict.getStr("grader_cnt"), currentTime
            )
          )
        } catch {
          case NonFatal(e) =>
            // Skip just this element and keep processing the rest of the array.
            logError(s"Skipping unparsable ranking entry, channel:${item.channel}," +
              s"year:${item.year},month:${item.month},ranking_name:${item.ranking_name}", e)
        }
      }
    } catch {
      // NonFatal (not Exception) so fatal JVM errors still propagate.
      case NonFatal(e) =>
        logError(
          s"""
             |解析字段detail的JSON数据出错,channel:${item.channel},year:${item.year},month:${item.month},ranking_name:${item.ranking_name},
             |detail:[${item.detail}]
             |""".stripMargin, e
        )
    }

    resArr
  }


}
