package a_o2odata_deal.utils

import a_aa_amainpackage.a_o2odata_deal.config.FieldsConfig.sellandpricerestart
import org.apache.spark.sql.{Dataset, Row, SQLContext}

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/9/3 16:59
  * @ Param:  ${PARAM}
  * @ Description: 
  */
/**
  * Overrides prices for live-stream ("zhibo") promoted goods on top of the
  * base sell/price dataset and recomputes the sales amount.
  */
object zhibo_price {

  /**
    * Builds the price-adjusted goods dataset.
    *
    * Steps:
    *  1. Snapshots the full sell/price dataset to S3 as JSON (side effect).
    *  2. Loads the zhibo goods list from S3.
    *  3. Left-joins on `good_id`; goods present in the zhibo list get a fixed
    *     price of 10, all others keep their original `priceText`.
    *  4. Recomputes `salesAmount = sellCount * priceText` and de-duplicates
    *     by `good_id`.
    *
    * @param sqlContext Spark SQL context used for reading, SQL, and temp views.
    * @return price-adjusted, de-duplicated goods dataset.
    */
  def handle_price(sqlContext: SQLContext): Dataset[Row] = {
    val sellpriceres = sellandpricerestart(sqlContext)

    // Side effect: persist the full goods snapshot to S3.
    // NOTE(review): no SaveMode is set, so this throws if the target path
    // already exists — confirm whether `.mode("overwrite")` is intended.
    sellpriceres
      .repartition(5).write.json("s3a://private/private_han/TM/2019/8/all_good")

    val zhibo_df = sqlContext.read.json("s3a://private/private_han/TM/2019/8/zhibo_good/*")

    // Use the non-deprecated view API consistently (registerTempTable is
    // deprecated since Spark 2.0; the modern call was already used below).
    zhibo_df.createOrReplaceTempView("zhibo_df")
    sellpriceres.createOrReplaceTempView("sellpriceres")

    // Goods matched in the zhibo list are forced to price 10 (presumably a
    // fixed live-stream promotion price — TODO confirm with business owner);
    // unmatched goods keep their original priceText.
    val zhibo_data = sqlContext.sql(
      """
        |select
        |t1.*,
        |case when t2.good_id is null then t1.priceText else 10 end as price
        |from
        |sellpriceres t1
        |left join
        |zhibo_df t2
        |on t1.good_id = t2.good_id
      """.stripMargin).drop("priceText", "salesAmount")
      .withColumnRenamed("price", "priceText")

    zhibo_data.createOrReplaceTempView("zhibo_data")

    // Recompute salesAmount from the (possibly overridden) price, then keep
    // one row per good_id.
    val data_res = sqlContext.sql(
      """
        |select *,(sellCount*priceText) as salesAmount from zhibo_data
      """.stripMargin).dropDuplicates("good_id")

    data_res
  }

}
