import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/6/5 16:23
  * @ Param:  ${PARAM}
  * @ Description: Scales May 2020 meituan_tg sales figures (30 days) down to an
  *                18-day "618" snapshot and writes the result back to OBS as ORC.
  */
object InsertMTDZObsData {

  /**
    * Reads the May 2020 meituan_tg ORC dataset from OBS, scales the monthly
    * (30-day) `sellCount` / `salesAmount` figures to an 18-day equivalent,
    * stamps every row with the 2020-06-18 epoch timestamp, and writes the
    * result back to OBS as ORC.
    *
    * @param args unused; all paths and credentials are hard-coded / taken from [[Iargs]].
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      // NOTE(review): Elasticsearch endpoint and credentials are hard-coded in
      // source (and the ES config is unused by the live job below) — move them
      // to external configuration and out of version control.
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      // NOTE(review): hard-coded local master — remove before cluster submit,
      // or make it an argument so spark-submit's --master takes effect.
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS (S3A-compatible) access is configured from the shared Iargs object.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    try {
      // createOrReplaceTempView replaces registerTempTable, which has been
      // deprecated since Spark 2.0.
      spark.read.orc("s3a://dws-data/g_data/2020/5/meituan_tg/")
        .createOrReplaceTempView("t")

      // Pro-rate the 30-day monthly totals to 18 days (June 1-18), keeping the
      // original column names via temp columns so downstream schemas match.
      spark.sql(
        """
          |select
          |*,
          |cast(ceil(sellCount/30.0*18) as bigint) as sellCount_tmp,
          |cast(salesAmount/30.0*18 as decimal(20,2)) as salesAmount_tmp
          |from t
          |""".stripMargin)
        .drop("sellCount", "salesAmount", "timeStamp")
        // 1592409600 = 2020-06-18 00:00:00 CST — the "618" festival date.
        // Kept as a string literal to match the original column type.
        .withColumn("timeStamp", lit("1592409600"))
        .withColumnRenamed("sellCount_tmp", "sellCount")
        .withColumnRenamed("salesAmount_tmp", "salesAmount")
        .repartition(4)
        .write.orc("s3a://dws-data/g_data/2020/618/meituan_tg/")
    } finally {
      // Release cluster resources even when the job fails part-way; stopping
      // the session also stops the underlying SparkContext.
      spark.stop()
    }
  }
}
