import com.alibaba.fastjson.{JSON, JSONObject}
import com.o2o.utils.Iargs
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/6/5 16:23
  * @ Description: Enriches the Elasticsearch index "2020_intime/intime_2020_8" with
  *                name/shopName/goodUrl fields taken from the OBS/S3 "modify" dump,
  *                joining the two datasets on good_id and writing the result back to ES.
  */
object InsertIntimeData {

  /**
    * Entry point. Pipeline:
    *   1. Read the monthly "modify" JSON dump from OBS (S3A protocol) as the dimension set.
    *   2. Read the existing ES index `2020_intime/intime_2020_8` as the source set,
    *      dropping the stale `shopName`/`goodUrl` columns so they can be re-derived.
    *   3. Left-join source onto dimension by `good_id` and upsert the enriched rows
    *      back into the same ES index (document id = good_id).
    *
    * @param args unused; all configuration is hard-coded below.
    */
  def main(args: Array[String]): Unit = {

    // SECURITY NOTE(review): ES node address and credentials are hard-coded in source.
    // Move them to a config file / secret store before this leaves a dev environment.
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Field names in the ES documents are case-sensitive (e.g. mallName vs mallname).
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    // OBS (Huawei object storage) is accessed through the S3A connector;
    // credentials come from the project-level Iargs constants.
    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Dimension data: the September 2020 "modify" dump.
    val frame: DataFrame = spark.read.json("s3a://o2o-sourcedata/obs-source-2020/intime/2020/9/modify/")

    // Project each dump record down to the four fields we enrich with.
    // Note the source field is `mallName` but the output key is `name`.
    val dimensionJson: RDD[String] = frame.toJSON.rdd.map { line =>
      val src: JSONObject = JSON.parseObject(line)
      val out = new JSONObject()
      out.put("good_id", src.getString("good_id"))
      out.put("name", src.getString("mallName"))
      // shopName is rendered as "shop（address）" with full-width parentheses,
      // matching the display convention used elsewhere.
      out.put("shopName", src.getString("shopName") + "（" + src.getString("address") + "）")
      out.put("goodUrl", src.getString("goodUrl"))
      out.toString
    }

    val frame1: DataFrame = spark.read.json(dimensionJson)

    import org.elasticsearch.spark._

    // Source data: current documents of the target index (JSON values only, ids discarded).
    val values: RDD[String] = sc.esJsonRDD("2020_intime/intime_2020_8").values

    // Drop the columns we are about to re-derive from the dimension set.
    // NOTE(review): `name` is NOT dropped here — if the index also has a `name`
    // field, the joined result will contain a duplicate column; verify the mapping.
    val frame2: DataFrame = spark.read.json(values).drop("shopName").drop("goodUrl")

    // FIX: registerTempTable has been deprecated since Spark 2.0 (removed in 3.x);
    // createOrReplaceTempView is the drop-in replacement with identical semantics.
    frame1.createOrReplaceTempView("dimen")
    frame2.createOrReplaceTempView("source")

    // Left join keeps every source document even when no dimension row matches
    // (unmatched rows get null name/shopName/goodUrl).
    val res = spark.sql(
      """
        |
        |select
        |a.*,
        |b.name,
        |b.shopName,
        |b.goodUrl
        |from
        |source a
        |left join
        |dimen b
        |on a.good_id=b.good_id
      """.stripMargin)

    // Upsert back into the same index; es.mapping.id makes good_id the document id,
    // so existing documents are overwritten rather than duplicated.
    // (JSONObject implements java.util.Map, which elasticsearch-hadoop serializes directly.)
    res.toJSON.rdd.map { line =>
      JSON.parseObject(line)
    }.saveToEs("2020_intime/intime_2020_8", Map("es.mapping.id" -> "good_id"))

    sc.stop()
  }
}
