package com.o2o.cleaning.month.platform.ebusiness_plat.jumei_2019_7.save_to_es

import com.alibaba.fastjson.JSON
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._
/**
  * @Description TODO 
  * @Author liutaowei
  * @Date 2018/12/4 17:16
  */
/**
  * Spark job that loads platform "good" (product) records — one JSON object
  * per line — from a local file and bulk-indexes them into Elasticsearch,
  * using each record's `good_id` field as the document id.
  *
  * Target index: `${year}_${platform_Name}_${month}` with type `type_1`.
  *
  * @author liutaowei
  * @since 2018/12/4
  */
object data_es_to_es {

  def main(args: Array[String]): Unit = {

    /*** ============= Important: job configuration ============= */
    val platform_Name = "jumei"
    val month         = "11"              // month partition used in the ES index name
    val year          = "2018"            // year partition used in the ES index name
    val nodes         = "192.168.2.247"   // Elasticsearch node address
    val obsFs         = "s3a://o2o-dataproces-group/" // OBS bucket root (no interpolation needed)
    /*** ======================================================== */

    // NOTE(review): ES and S3 credentials are hard-coded below — they should be
    // supplied via configuration or environment variables, not committed source.
    val spark = SparkSession.builder()
      .appName("SparkTest")
      .master("local[*]")
      .config("es.nodes", nodes)
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .getOrCreate()
    //    val platform_shopUrl=obsFs + s"platdata/${year}/${month}/${platform_Name}/${platform_Name}_shop"

    // NOTE(review): this OBS path is built but never read — the job currently
    // reads from the local Windows path below instead. Confirm which input is intended.
    val platform_goodUrl = obsFs + s"liu_taowei/month_data/platdata/2018/11/jumei/jumei_good/"

    val sc = spark.sparkContext
    // S3A (Huawei OBS) access configuration for the Hadoop filesystem layer.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "I6X0QOBRFQNDXGH6AMEU")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "uRrV8loOf6OszhzXz5GkkzBEQe5BY03vq5NEEWUw")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Read newline-delimited JSON from local disk and parse each line into a
    // fastjson JSONObject (a Map-like structure elasticsearch-spark can index).
    val da = sc.textFile(s"D:\\sss\\test\\platData\\11\\${platform_Name}_2")
    val value = da.map(line => JSON.parseObject(line))

    // Index into `${year}_${platform_Name}_${month}/type_1`, mapping the JSON
    // field `good_id` to the Elasticsearch document id (idempotent re-runs).
    // Fix: use the configured `year` instead of a hard-coded "2018" so the
    // index name stays consistent with the configuration block above.
    value.saveToEs(s"${year}_${platform_Name}_${month}/type_1",
      Map("es.mapping.id" -> "good_id"))
  }
}
