package a_aa_amainpackage.lvyou_clear.lv_update

import com.alibaba.fastjson.JSON
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark._

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/6/2 11:17
  * @ Param:  ${PARAM}
  * @ Description: Re-indexes the tuniu_ly_2020_5 JSON backup stored on OBS (s3a)
  *                into the Elasticsearch index 2020_update_tuniu_ly/tuniu_ly_2020_5.
  */
object test_tuniu_lv2es {

  /**
    * Reads a JSON backup of the tuniu_ly_2020_5 data set from OBS (via the
    * s3a connector) and re-indexes every document into Elasticsearch, using
    * each document's `product_Id` field as the ES document id so that
    * re-running the job upserts instead of duplicating documents.
    *
    * @param args unused (the MongoDB URI comment below suggests args(0) was
    *             once intended here — TODO confirm and remove if dead)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    // local[*] is fine for a one-off backfill; use a cluster master for production runs.
    conf.setMaster("local[*]")

    // Target Elasticsearch cluster.
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")

    // NOTE(review): this job never reads from MongoDB, so this setting looks like
    // copy-paste leftovers; the URI also contains a stray space after '@'.
    // Consider deleting it (left byte-identical here because it is unused).
    conf.set("spark.mongodb.input.uri", "mongodb://root:O2Odata123!@ 192.168.0.149:27017/admin") //  mongodb://ob:O2Odata123!@ 192.168.0.56:27017/admin   args(0)
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    // SECURITY: credentials are hard-coded in source (here and in the MongoDB
    // URI above). Move them to environment variables or a secrets store and
    // rotate the exposed keys.
    conf.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    conf.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    conf.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

//    val esData = sc.esJsonRDD("2020_update_tuniu_ly/tuniu_ly_2020_5").values
//
//    sqlContext.read.json(esData).write.json("s3a://o2o-dataproces-group/zsc/tuniu_ly_2020_5_back")

    try {
      // Load the backup and re-parse each row into a fastjson JSONObject
      // (a java.util.Map, which elasticsearch-spark can index directly).
      val data: DataFrame = sqlContext.read.json("s3a://o2o-dataproces-group/zsc/tuniu_ly_2020_5_back")
      val docs = data.toJSON.rdd.map(line => JSON.parseObject(line))

      // Write back to ES keyed by product_Id so re-runs overwrite rather
      // than create duplicates.
      docs.saveToEs("2020_update_tuniu_ly/tuniu_ly_2020_5", Map("es.mapping.id" -> "product_Id"))
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
