package com.o2o.cleaning.month.platform.ebusiness_plat.lst

import com.alibaba.fastjson.JSON
import com.o2o.utils.Iargs
import org.apache.spark.sql.{DataFrame, SparkSession}

object Lst {

  /**
   * Monthly ETL for the "lst" e-business platform.
   *
   * Reads the current month's raw JSON dump from OBS (via s3a), deduplicates
   * records by `good_id`, derives a monthly `sellCount` (90-day count / 3) and
   * `salesAmount` (= priceText * sellCount, rounded to 2 decimals), then bulk
   * writes the enriched records into Elasticsearch index
   * `247_<year>_lst/lst_<year>_<month>`.
   *
   * NOTE(review): a dead, commented-out month-over-month delta variant was
   * removed here; it read the *current* month's path for both sides of the
   * join (copy-paste bug), so it never produced a real delta.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
//            .master("local[*]")
      .getOrCreate()

    // S3A credentials for OBS come from the shared Iargs config object.
    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    /** *******************  IMPORTANT — adjust per run  *********************/

    val obs = "s3a://"
    val platform_Name = "lst"

    val year = Iargs.YEAR
    val month = Iargs.MONTH.toInt

    // Raw source dump for the current month on OBS.
    val data_path_cur = obs + s"o2o-sourcedata/obs-source-${year}/${month}/${platform_Name}/${platform_Name}_${year}_${month}"

    // Used below for best-effort numeric parsing of dirty source rows.
    import scala.util.Try

    // One record per business key `good_id`.
    val data_df: DataFrame = spark.read.json(data_path_cur).dropDuplicates("good_id")

    // Enrich each JSON record with derived monthly sales figures.
    val rdd = data_df.toJSON.rdd.map(line => {
      val nObject = JSON.parseObject(line)

      // Sentinel defaults (-1) mark missing or unparseable values; such rows
      // are dropped by the `sellCount > 0 and salesAmount > 0` filter below.
      // Try(...) keeps one malformed record from failing the whole job.
      val sellCount90 = Try(nObject.getOrDefault("sellCount90", "-1").toString.toInt).getOrElse(-1)
      val priceText   = Try(nObject.getOrDefault("caigouPrice", "-1").toString.toDouble).getOrElse(-1.0)

      // Approximate one month of sales as a third of the 90-day count.
      // Integer division is intentional (counts are whole units).
      val sellCount = sellCount90 / 3

      nObject.put("priceText", priceText)
      nObject.put("sellCount", sellCount)
      // "%.2f".format(...) replaces the deprecated Double.formatted(...);
      // identical rounding to 2 decimal places.
      nObject.put("salesAmount", "%.2f".format(priceText * sellCount).toDouble)

      nObject.toString
    })

    // Keep only rows with real sales; `Base_Info` is not needed downstream.
    val result = spark.read.json(rdd).filter("sellCount > 0 and salesAmount > 0").drop("Base_Info")

    // Re-parse each row into a JSONObject (a java.util.Map) so the ES
    // connector can serialize it directly.
    val ress = result.toJSON.rdd.map(line => JSON.parseObject(line))

    println(s"    开始入库    ==     node_247   ==  ${year}_${platform_Name}_${month}  ")

    import org.elasticsearch.spark._
    // SECURITY(review): ES credentials and node address are hard-coded;
    // move them into Iargs / external configuration like the OBS keys above.
    ress.saveToEs(
      s"247_${year}_lst/lst_${year}_${month}",
      Map("es.mapping.id" -> "good_id",
        "es.nodes" -> "192.168.1.29",
        "es.net.http.auth.user" -> "elastic",
        "es.net.http.auth.pass" -> "changeme",
        "es.port" -> "9200",
        "cluster.name" -> "O2OElastic"))

    println("*****************  已入完  *****************")

    // spark.stop() also stops the underlying SparkContext.
    spark.stop()
  }
}
