package com.o2o.cleaning.month.platform.ebusiness_plat.taobao

import java.lang

import com.alibaba.fastjson.{JSON, JSONObject}
import com.o2o.utils.Iargs
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.util.Random

/**
  * @ Auther: o2o-rd-0008
  * @ Date:   2020/11/6 10:46
  * @ Param:  ${PARAM}
  * @ Description: 
  */
/**
  * One-off batch job for the 2020 "Double 11" dataset: reads October Taobao
  * listings from S3 (ORC), shrinks each record's sellCount by a random factor
  * in [6.0, 6.9], recomputes salesAmount, writes the result back to S3 as ORC,
  * and prints an aggregate sanity check (row count / total sells / total sales).
  */
object TaobaoShuang11Deal {

  // Source and destination ORC paths on S3 (OBS-compatible endpoint).
  private val InputPath  = "s3a://dws-data/g_data/2020/10/taobao/"
  private val OutputPath = "s3a://dws-data/g_data/2020/shuang11/2020_11_1_2020_tui/taobao"

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
//    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Each ORC row is serialized to a JSON string, rescaled, and re-parsed
    // below so Spark re-infers the schema with the updated field types.
    val taobaoDealRDD: RDD[String] = spark.read.orc(InputPath).toJSON.rdd.map(scaleRecord)

    spark.read.json(taobaoDealRDD).write.orc(OutputPath)

    // registerTempTable has been deprecated since Spark 2.0;
    // createOrReplaceTempView is the drop-in replacement.
    spark.read.orc(OutputPath).createOrReplaceTempView("tab")

    // Sanity check: re-read what was just written and aggregate it.
    spark.sql(
      """
        |select
        |count(1),
        |sum(sellCount) sell,
        |sum(salesAmount) sales
        |from
        |tab
      """.stripMargin).show(false)
  }

  /**
    * Rescales a single JSON record in place and returns its string form.
    *
    * sellCount  := ceil(sellCount / divisor), divisor random in [6.0, 6.9]
    * salesAmount := sellCount * priceText, rounded to 2 decimal places
    *
    * fastjson's getLong/getDouble return null when the field is missing or
    * null; the original code unboxed those values directly and would NPE on
    * any malformed record. Here a missing value is treated as 0 instead.
    */
  private def scaleRecord(line: String): String = {
    val nObject: JSONObject = JSON.parseObject(line)

    // Option(...) maps fastjson's null (absent/null field) to a safe default,
    // avoiding the auto-unboxing NPE of `java.lang.Long / Double`.
    val sell: Long    = Option(nObject.getLong("sellCount")).map(_.longValue()).getOrElse(0L)
    val price: Double = Option(nObject.getDouble("priceText")).map(_.doubleValue()).getOrElse(0.0)

    // The Random companion object is a shared generator — no need to
    // allocate `new Random()` for every record as the original did.
    val divisor: Double = Random.nextInt(10) / 10.0 + 6

    val sellCount: Int = math.ceil(sell / divisor).toInt

    nObject.put("sellCount", sellCount)
    // Round half-up to 2 decimals arithmetically; the original
    // `formatted("%.2f").toDouble` is deprecated in Scala 2.13 and
    // locale-sensitive (a comma decimal separator breaks `.toDouble`).
    nObject.put("salesAmount", math.round(sellCount * price * 100.0) / 100.0)

    nObject.toString
  }
}
