package com.o2o.utils

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
  * One-off Spark job: reads ORC data from OBS (S3A), normalizes the
  * "evaluates" field of each record to a string (defaulting to "-1"),
  * and writes the result back as ORC.
  *
  * @author o2o-rd-0008
  * @since  2020/6/5
  */
object ESDataPresale {

  /**
    * Normalizes one JSON record: guarantees the "evaluates" field exists and
    * is a string, defaulting to "-1" when the key is absent.
    *
    * The `toString` coercion matters: source records may carry "evaluates" as
    * a number, and forcing a single string type keeps Spark's downstream JSON
    * schema inference from producing a mixed/conflicting column type.
    *
    * @param line one record serialized as a JSON string
    * @return the record re-serialized with a normalized "evaluates" field
    */
  private def normalizeEvaluates(line: String): String = {
    val record: JSONObject = JSON.parseObject(line)
    // JSONObject implements java.util.Map, so getOrDefault covers the missing-key case.
    record.put("evaluates", record.getOrDefault("evaluates", "-1").toString)
    record.toString
  }

  def main(args: Array[String]): Unit = {

    // NOTE(review): the ES credentials were hard-coded in source. They are now
    // read from the environment, with the previous literals kept as fallbacks
    // so existing deployments behave identically. They should be removed from
    // source control entirely (secret manager / spark-submit conf).
    val esUser = sys.env.getOrElse("ES_USER", "elastic")
    val esPass = sys.env.getOrElse("ES_PASS", "changeme")

    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      // Elasticsearch connection settings; used by the commented-out
      // esJsonRDD export variants kept below for reference.
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", esUser)
      .config("es.net.http.auth.pass", esPass)
//      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS (S3-compatible) access via the s3a connector; credentials come from
    // the project-level Iargs configuration object.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Read ORC -> serialize rows to JSON -> normalize "evaluates" -> re-infer
    // schema from the normalized JSON -> write back as ORC to a new path.
    val jd: RDD[String] = spark.read
      .orc("s3a://dws-data/g_data/2020/shuang11/2020/jd/")
      .toJSON
      .rdd
      .map(normalizeEvaluates)

    spark.read.json(jd).write.orc("s3a://dws-data/g_data/2020/shuang11/2020/jd_new/")


      //    spark.read.orc("s3a://dws-data/g_data/2020/shuang11/2020_presale/tmall").show()
      //    spark.read.orc("s3a://dws-data/g_data/2020/shuang11/2020_presale/tmall/").printSchema()

      // Historical one-off variants kept for reference (Elasticsearch exports
      // for tmall / kaola / suning / guomei indices).

      /*import org.elasticsearch.spark._

    val values: RDD[String] = sc.esJsonRDD("2020_presale_tmall/tmall_2020_shuang11_2_2020").values

    val value: RDD[String] = spark.read.json(values).toJSON.rdd.map(lines => {
      val nObject: JSONObject = JSON.parseObject(lines)

      val evaluates: String = nObject.getOrDefault("evaluates","-1").toString

      nObject.put("evaluates",evaluates)

      nObject.toString
    })


    spark.read.json(value).write.orc("s3a://dws-data/g_data/2020/shuang11/2020_presale_2_tmall/tmall_orc/")
    spark.read.json(value).write.json("s3a://dws-data/g_data/2020/shuang11/2020_presale_2_tmall/tmall_json/")*/


      /*val value: RDD[String] = spark.read.json(values).toJSON.rdd.map(lines => {
      val nObject: JSONObject = JSON.parseObject(lines)

      val evaluates: String = nObject.getOrDefault("evaluates","-1").toString

      val is_showLive = nObject.getBoolean("is_showLive")

      nObject.put("is_showLive",is_showLive)

      nObject.put("evaluates",evaluates)

      nObject.toString
    })

    spark.read.json(value).write.orc("s3a://dws-data/g_data/2020/shuang11/2020_swb_presale/tmall/")
    spark.read.json(value).write.json("s3a://dws-data/g_data/2020/shuang11/2020_swb_presale/tmall_json/")*/

    /*val kaola: RDD[String] = sc.esJsonRDD("kaola_2020_activity/kaola_2020_11_2020").values

    val kaola1: RDD[String] = spark.read.json(kaola).toJSON.rdd.map(lines => {
      val nObject: JSONObject = JSON.parseObject(lines)

      val evaluates: String = nObject.getOrDefault("evaluates","-1").toString

      val is_oversea: Boolean = nObject.getString("is_oversea").toBoolean

      nObject.put("timeStamp","1606665600")
      nObject.put("is_oversea",is_oversea)
      nObject.put("evaluates",evaluates)

      nObject.toString
    })


    spark.read.json(kaola1).write.orc("s3a://dws-data/g_data/2020/shuang11/2020/kaola/")*/


    /*val suning: RDD[String] = sc.esJsonRDD("suning_2020_activity/suning_2020_11_2020").values

    val suning1: RDD[String] = spark.read.json(suning).toJSON.rdd.map(lines => {
      val nObject: JSONObject = JSON.parseObject(lines)

      val evaluates: String = nObject.getOrDefault("evaluates","-1").toString
      val is_market: Boolean = nObject.getString("is_market").toBoolean
      val is_oversea: Boolean = nObject.getString("is_oversea").toBoolean
      val is_o2o: Boolean = nObject.getString("is_o2o").toBoolean
      nObject.put("timeStamp","1606665600")
      nObject.put("is_oversea",is_oversea)
      nObject.put("is_o2o",is_o2o)
      nObject.put("is_market",is_market)
      nObject.put("evaluates",evaluates)

      nObject.toString
    })

    spark.read.json(suning1).write.orc("s3a://dws-data/g_data/2020/shuang11/2020/suning/")*/


    /*val guomei: RDD[String] = sc.esJsonRDD("guomei_2020_activity/guomei_2020_11_2020").values

    val guomei1: RDD[String] = spark.read.json(guomei).toJSON.rdd.map(lines => {
      val nObject: JSONObject = JSON.parseObject(lines)

      val evaluates: String = nObject.getOrDefault("evaluates","-1").toString
      val is_o2o: Boolean = nObject.getString("is_o2o").toBoolean
      val is_oversea: Boolean = nObject.getString("is_oversea").toBoolean
//      val is_market: Boolean = nObject.getString("is_market").toBoolean
      nObject.put("timeStamp","1606665600")
      nObject.put("is_oversea",is_oversea)
//      nObject.put("is_market",is_market)
      nObject.put("is_o2o",is_o2o)
      nObject.put("evaluates",evaluates)

      nObject.toString
    })

    spark.read.json(guomei1).write.orc("s3a://dws-data/g_data/2020/shuang11/2020/guomei/")*/

//    spark.read.orc("s3a://o2o-dataproces-group/zhao_jiadi/activity/1111/jd/2020/11/finallyResult_201103/")
//      .write.orc("s3a://dws-data/g_data/2020/shuang11/2020_11_1_2020_tui/jd/")

    // Stop the session (this also stops the underlying SparkContext).
    spark.stop()
  }
}
