package com.hucais.agg.dao

import cn.hutool.core.date.DateUtil
import com.hucais.agg.bean._
import com.hucais.core.utils.DefaultPropertiesUtil
import org.apache.spark.sql.{Dataset, SparkSession}
import org.elasticsearch.spark.sql.EsSparkSQL

object SourceIndexDataDao {

  /**
   * Fetches full-text search detail data from ES and de-duplicates it.
   *
   * Reads the book query index, casts `sales` to Long, then keeps only the
   * highest-selling-price row per (isbn, store_pricing) pair, dropping rows
   * where isbn / store_pricing / selling_price is null.
   *
   * @param sparkSession active Spark session
   * @return de-duplicated book base info rows
   */
  def getSearchInfoData(sparkSession: SparkSession): Dataset[SearchInfoPart] = {
    import sparkSession.implicits._
    // Query-window bounds; only used by the (currently disabled) range query below.
    // NOTE: was "yyy-MM-dd" (three y's) — fixed to "yyyy-MM-dd" to match the
    // format declared in the range query.
    val yesterday = DateUtil.format(DateUtil.yesterday(), "yyyy-MM-dd")
    val today = DateUtil.today()

    val esQuery =
      s"""
         |{
         |  "query": {
         |    "match_all": {}
         |  },
         |  "_source":{
         |        "includes":["second_channel","isbn","publishing_house","category","store_pricing","selling_price","sales","uploader","copyright_month"]
         |    }
         |}
      """.stripMargin
    /*
        // Alternative: incremental pull restricted to [yesterday, today).
        val esQuery =
          s"""
             |{
             |  "query": {
             |    "range": {
             |      "create_time": {
             |        "gte": "$yesterday",
             |        "lt": "$today",
             |        "format": "yyyy-MM-dd"
             |      }
             |    }
             |  },
             |  "_source":{
             |        "includes":["second_channel","publishing_house","category","store_pricing","selling_price","sales","uploader","copyright_month"]
             |    }
             |}
          """.stripMargin
    */
    //    val searchInfoDS = EsSparkSQL.esDF(sparkSession, DefaultPropertiesUtil.get("index.book.base.info"), esQuery)
    val tmpDF = EsSparkSQL.esDF(sparkSession, DefaultPropertiesUtil.get("dws.book.query.info"), esQuery)
    // ES may return sales as a wider/looser type; cast so it fits SearchInfoPart.
    val searchInfoDS = tmpDF.withColumn("sales", tmpDF("sales").cast("Long")).as[SearchInfoPart]
    searchInfoDS.createOrReplaceTempView("tmp_book_base_info")

    // Filter to valid reverse-selection data: one row per ISBN + store pricing,
    // keeping the record with the highest selling price.
    sparkSession.sql(
      s"""
         |select second_channel,isbn,publishing_house,category,store_pricing,selling_price,sales,uploader,copyright_month
         |from
         |(
         |	select
         |		second_channel,isbn,publishing_house,category,store_pricing ,selling_price,sales ,uploader ,copyright_month ,
         |		row_number() over(partition by isbn,store_pricing order by selling_price desc) rk
         |	from tmp_book_base_info
         |  where isbn is not null and store_pricing is not null and selling_price is not null
         |)a where a.rk=1
         |""".stripMargin).as[SearchInfoPart]
  }

  /**
   * Extended variant of [[getSearchInfoData]]: enriches each row with a
   * premium multiple bucket and a premium type label.
   *
   * Rows with non-positive store_pricing or selling_price are dropped
   * (also guards the division below).
   *
   * @param sparkSession active Spark session
   * @return enriched book base info rows
   */
  def getSearchInfoDataExt(sparkSession: SparkSession): Dataset[BookBaseInfoExt] = {
    val bookBaseInfoDS = getSearchInfoData(sparkSession)
    import sparkSession.implicits._
    bookBaseInfoDS.mapPartitions(partitions => {
      partitions
        .filter(item => {
          item.store_pricing > 0.00 && item.selling_price > 0.00
        })
        .map(item => {
          // BigDecimal avoids Double rounding artifacts in the bucket boundaries.
          val sellingPrice = BigDecimal(item.selling_price)
          val storePricing = BigDecimal(item.store_pricing)

          // Premium multiple bucket: (selling - store) / store.
          // Expression form replaces the old var + exhaustive guarded match;
          // the former "未知" default was unreachable.
          val premium = (sellingPrice - storePricing) / storePricing
          val premiumMultiple =
            if (premium < 2) "2倍以下"
            else if (premium <= 5) "2-5倍"
            else if (premium <= 10) "5-10倍"
            else "10倍以上"

          // Premium type from the absolute price difference.
          val priceDiff = sellingPrice - storePricing
          val premiumType =
            if (priceDiff < 0) "折扣"
            else if (priceDiff <= 3) "正价"
            else "溢价"

          BookBaseInfoExt(
            item.second_channel, item.isbn, item.publishing_house, item.category, item.store_pricing,
            item.selling_price, item.sales, item.uploader, item.copyright_month, premiumMultiple,
            premiumType)
        })
    })
  }

  /**
   * Fetches OpenBook (开卷) market data from ES.
   *
   * De-duplicates on (isbn, month_sales) and casts month_sales to Long.
   *
   * @param sparkSession active Spark session
   * @return de-duplicated OpenBook rows
   */
  def getOpenData(sparkSession: SparkSession): Dataset[OpenBooksPart] = {
    import sparkSession.implicits._
    // Only used by the (currently disabled) filtered query variant below.
    // NOTE: was "yyy-MM-dd" — fixed to "yyyy-MM-dd".
    val yesterday = DateUtil.format(DateUtil.yesterday(), "yyyy-MM-dd")
    val today = DateUtil.today()

    val esQuery =
      s"""
         |{
         |  "query": {
         |    "match_all": {}
         |  },
         |  "_source":{
         |        "includes":["isbn","sale_time","category","publishing_house","month_sales"]
         |    }
         |}
      """.stripMargin

    /* // Alternative: restrict to a specific sale month and category.
       val esQuery =
       s"""
          |{
          |  "query": {
          |    "bool": {
          |      "must": [
          |        {"term": {
          |          "sale_time": "2016-01"
          |        }},
          |        {"term": {
          |          "category": "社科"
          |        }}
          |      ]
          |    }
          |  },
          |  "_source":{
          |        "includes":["isbn","sale_time","category","publishing_house","month_sales"]
          |    }
          |}
       """.stripMargin*/
    val tmpDF = EsSparkSQL.esDF(sparkSession, DefaultPropertiesUtil.get("ods.opendata"), esQuery)

    val colArray = Array("isbn", "month_sales")
    tmpDF.dropDuplicates(colArray).withColumn("month_sales", tmpDF("month_sales").cast("Long")).as[OpenBooksPart]
  }

  /**
   * Fetches shelved-product data from ES.
   *
   * @param sparkSession active Spark session
   * @return shelved-product rows (channel, copyright_person, shelf_time)
   */
  def getShelfProduct(sparkSession: SparkSession): Dataset[ShelfProductPart] = {
    import sparkSession.implicits._
    // Only kept for the (currently disabled) incremental-query variants used
    // elsewhere in this object. NOTE: was "yyy-MM-dd" — fixed to "yyyy-MM-dd".
    val yesterday = DateUtil.format(DateUtil.yesterday(), "yyyy-MM-dd")
    val today = DateUtil.today()

    val esQuery =
      s"""
         |{
         |  "query": {
         |    "match_all": {}
         |  },
         |  "_source":{
         |        "includes":["channel","copyright_person","shelf_time"]
         |    }
         |}
      """.stripMargin
    EsSparkSQL.esDF(sparkSession, DefaultPropertiesUtil.get("ods.shelf.products"), esQuery).as[ShelfProductPart]
  }

  /**
   * Fetches product sales-movement data from ES.
   *
   * Casts `sales` to Long before typing the dataset.
   *
   * @param sparkSession active Spark session
   * @return product sales rows (statistical_month, sales)
   */
  def getProductSales(sparkSession: SparkSession): Dataset[ProductSalesPart] = {
    import sparkSession.implicits._
    // Only used by disabled incremental-query variants.
    // NOTE: was "yyy-MM-dd" — fixed to "yyyy-MM-dd".
    val yesterday = DateUtil.format(DateUtil.yesterday(), "yyyy-MM-dd")
    val today = DateUtil.today()

    val esQuery =
      s"""
         |{
         |  "query": {
         |    "match_all": {}
         |  },
         |  "_source":{
         |        "includes":["statistical_month","sales"]
         |    }
         |}
      """.stripMargin
    //    EsSparkSQL.esDF(sparkSession, DefaultPropertiesUtil.get("ods.product.sales.info"), esQuery)
    val tmpDF = EsSparkSQL.esDF(sparkSession, DefaultPropertiesUtil.get("ods.product.sales.info"), esQuery)
    tmpDF.withColumn("sales", tmpDF("sales").cast("Long")).as[ProductSalesPart]
  }
}
