package com.hucais.etl.common.dao

import com.hucais.etl.job._
import org.apache.spark.sql.{Dataset, SparkSession}

/**
 * Reads ODS-layer (operational data store) data from the Hive warehouse.
 */
object OdsHiveDao {

  /**
   * Loads ODS Dangdang data: for each ISBN, the most recently acquired
   * book-information row joined (left) to its stores, keeping the single
   * best-selling store per ISBN together with per-ISBN store aggregates
   * (total sales, min/avg selling price, store count).
   *
   * @param sparkSession active Spark session
   * @param dataSource   source channel (reverse-selection secondary channel)
   * @param startDate    inclusive lower bound on acquisition_time (applied to books)
   * @param endDate      exclusive upper bound on acquisition_time (applied to books)
   * @return one row per ISBN, aggregates defaulting to 0 when no store matched
   */
  def getOdsDdData(sparkSession: SparkSession, dataSource: String, startDate: String, endDate: String): Dataset[OdsDdData] = {
    import sparkSession.implicits._
    // NOTE(review): the store CTE is not bounded by [startDate, endDate) although the
    // book CTE is — confirm store aggregates are meant to span all acquisition dates.
    // NOTE(review): parameters are interpolated directly into the SQL text; callers
    // must pass trusted values (Spark >= 3.4 offers parameterized spark.sql).
    //
    // FIX: the final window functions previously partitioned by store.isbn. After a
    // LEFT JOIN, store.isbn is NULL for every book without a store row, so all such
    // books fell into a single NULL partition and "rn = 1" kept only one of them,
    // silently dropping the rest. Partitioning by book.isbn keeps every book.
    sparkSession.sql(
      s"""
         |with book as (
         |  select isbn,book_name,author,pricing,category,publishing_house,publishing_time,discount,brand,acquisition_timestamp
         |  from
         |  (
         |    select isbn,book_name,author,pricing,category,publishing_house,publishing_time,discount,brand,acquisition_timestamp,
         |    row_number() over(partition by isbn order by acquisition_timestamp desc) as rn
         |    from published.ods_book_information
         |    where acquisition_time>='${startDate}' and acquisition_time<'${endDate}' and data_source ='${dataSource}' and isbn !='' and isbn is not null
         |    and acquisition_timestamp !='' and acquisition_timestamp is not null
         |  )a where a.rn=1
         |),
         |store as (
         |  select isbn,store_name,store_pricing,selling_price,store_sales
         |  from
         |  (
         |    select isbn,name_of_shop as store_name,store_pricing,selling_price,coalesce(book_review,0) as store_sales,
         |    row_number() over(partition by isbn,name_of_shop order by acquisition_timestamp desc) as rn
         |    from published.ods_book_store
         |    where data_source ='${dataSource}' and isbn !='' and isbn is not null
         |    and acquisition_timestamp !='' and acquisition_timestamp is not null
         |  )a where a.rn=1
         |)
         |
         |select
         |  isbn,book_name,author,category,publishing_house,publishing_time,
         |  discount,brand,coalesce(store_pricing,0.00) as hot_store_pricing,coalesce(selling_price,0.00) as hot_selling_price,
         |  coalesce(year_sales,0) as year_sales,coalesce(min_selling_price,0.00) as min_selling_price,
         |  coalesce(avg_selling_price,0.00) as avg_selling_price,store_cnt
         |from (
         |  select
         |    book.isbn,book_name,author,category,publishing_house,publishing_time,
         |    discount,brand,store.store_pricing,store.selling_price,
         |    sum(store_sales) over(partition by book.isbn) as year_sales,
         |    min(store.selling_price) over(partition by book.isbn) as min_selling_price,
         |    avg(store.selling_price) over(partition by book.isbn) as avg_selling_price,
         |    count(store.isbn) over(partition by book.isbn) as store_cnt,
         |    row_number() over(partition by book.isbn order by store.store_sales desc) as rn
         |  from book left join store on book.isbn=store.isbn
         |)a where a.rn=1
         |""".stripMargin).as[OdsDdData]
  }

  /**
   * Loads ODS book-information rows for one data source within a date window,
   * deduplicated to the most recently acquired row per ISBN.
   *
   * @param sparkSession active Spark session
   * @param dataSource   source channel filter
   * @param startDate    inclusive lower bound on acquisition_time
   * @param endDate      exclusive upper bound on acquisition_time
   * @return latest book-information record per ISBN
   */
  def getOdsBook(sparkSession: SparkSession, dataSource: String, startDate: String, endDate: String): Dataset[OdsBookInformation] = {
    import sparkSession.implicits._
    // Rank rows per ISBN by acquisition_timestamp (newest first) and keep rank 1.
    val latestBookPerIsbn =
      s"""
         |select
         |	isbn,book_name,author,pricing,category,
         |	publishing_house,publishing_time,impression,edition,sales_volume,
         |	binding_layout,format,number_of_pages,paper,number_of_suit,data_source
         |from
         |(
         |	select
         |	isbn,book_name,author,pricing,category,
         |	publishing_house,publishing_time,impression,edition,sales_volume,
         |	binding_layout,format,number_of_pages,paper,number_of_suit,data_source,
         |	row_number() over(partition by isbn order by acquisition_timestamp desc) as rn
         |	from published.ods_book_information
         | where acquisition_time>='${startDate}' and acquisition_time<'${endDate}' and data_source ='${dataSource}'
         | and isbn !='' and isbn is not null and acquisition_timestamp !='' and acquisition_timestamp is not null
         |)a where a.rn=1
         |""".stripMargin
    sparkSession.sql(latestBookPerIsbn).as[OdsBookInformation]
  }

  /**
   * Loads ODS book-store rows for one data source within a date window,
   * deduplicated to the most recently acquired row per (ISBN, shop) pair.
   *
   * @param sparkSession active Spark session
   * @param dataSource   source channel filter
   * @param startDate    inclusive lower bound on acquisition_time
   * @param endDate      exclusive upper bound on acquisition_time
   * @return latest store record per ISBN/shop combination
   */
  def getOdsStore(sparkSession: SparkSession, dataSource: String, startDate: String, endDate: String): Dataset[OdsBookStore] = {
    import sparkSession.implicits._
    // Rank rows per (isbn, name_of_shop) by acquisition_timestamp and keep the newest.
    val latestStorePerIsbnShop =
      s"""
         |select
         |	isbn,name_of_shop,slogan,book_review,selling_price,
         |	store_pricing,conditions,number,data_source
         |from
         |(
         |	select
         |		isbn,name_of_shop,slogan,book_review,selling_price,
         |		store_pricing,conditions,number,data_source,
         |		row_number() over(partition by isbn,name_of_shop order by acquisition_timestamp desc) as rn
         |	from published.ods_book_store
         |	where acquisition_time>='${startDate}' and acquisition_time<'${endDate}' and data_source ='${dataSource}'
         |	and isbn !='' and isbn is not null and acquisition_timestamp !='' and acquisition_timestamp is not null
         |)a where a.rn=1
         |""".stripMargin
    sparkSession.sql(latestStorePerIsbnShop).as[OdsBookStore]
  }

  /**
   * Loads ODS book-comment rows for one data source within a date window.
   * No deduplication is applied; only null/empty keys are filtered out.
   *
   * @param sparkSession active Spark session
   * @param dataSource   source channel filter
   * @param startDate    inclusive lower bound on acquisition_time
   * @param endDate      exclusive upper bound on acquisition_time
   * @return raw comment rows (store name, book name, score, source)
   */
  def getOdsComment(sparkSession: SparkSession, dataSource: String, startDate: String, endDate: String): Dataset[OdsBookComment] = {
    import sparkSession.implicits._
    val commentQuery =
      s"""
         |select store_name,book_name,score,data_source
         |from published.ods_book_comment
         |where acquisition_time>='${startDate}' and acquisition_time<'${endDate}' and data_source ='${dataSource}'
         |and store_name is not null and store_name !='' and book_name is not null and book_name !=''
         |and acquisition_timestamp is not null and acquisition_timestamp !=''
         |""".stripMargin
    sparkSession.sql(commentQuery).as[OdsBookComment]
  }

  /**
   * Loads the full ODS OpenBook dataset (no source or date filtering).
   *
   * @param sparkSession active Spark session
   * @return every row of published.ods_openbooks
   */
  def getOdsOpenBook(sparkSession: SparkSession): Dataset[OdsOpenBook] = {
    import sparkSession.implicits._
    // The query has no interpolated values, so the plain triple-quoted literal is
    // used instead of the s-interpolator (avoids a lint warning and any accidental
    // `$` expansion if the SQL is later edited).
    sparkSession.sql(
      """
        |select isbn,book_name,selling_price,discount_rate,author,category,
        |publishing_house,month_sales,year_sales,total_sales,book_list,
        |channel_type,sale_type,sale_time
        |from published.ods_openbooks
        |""".stripMargin).as[OdsOpenBook]
  }


}
