package com.toES


import org.apache.spark.internal.Logging
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory

/**
  *
  * @author ymy.hadoop
  */
object SparkToES extends Logging with Serializable {

  /**
    * Demo job: writes two sample documents to the `sql_command/sql_info`
    * Elasticsearch index using the elasticsearch-hadoop RDD integration.
    *
    * Usage: SparkToES [esNodes] [esPort]
    *   - esNodes: comma-separated ES host list (default: "spark001")
    *   - esPort : ES HTTP port                 (default: "9200")
    *
    * Running with no arguments preserves the original hard-coded behavior.
    */
  def main(args: Array[String]): Unit = {

    // elasticsearch-hadoop enrichment: adds `saveToEs` to RDDs.
    import org.elasticsearch.spark._

    // Optional CLI overrides, falling back to the original defaults so the
    // zero-argument invocation behaves exactly as before.
    val esNodes = args.lift(0).getOrElse("spark001")
    val esPort  = args.lift(1).getOrElse("9200")

    val conf = new SparkConf()
      .setAppName("SparkToES")
      // Local mode for demo purposes; override with spark-submit --master in production.
      .setMaster("local[2]")
      .set("es.nodes", esNodes)
      .set("es.port", esPort)
      // Let elasticsearch-hadoop create the target index if it doesn't exist yet.
      .set("es.index.auto.create", "true")

    val sc = new SparkContext(conf)
    try {
      // Two sample documents; each Map becomes one Elasticsearch document.
      val numbers  = Map("OTP" -> "1", "twh" -> "2", "three" -> "3")
      val airports = Map("OTP" -> "xxx", "SFO" -> "yyy")

      sc.makeRDD(Seq(numbers, airports)).saveToEs("sql_command/sql_info")
    } finally {
      // Always release the SparkContext, even when the ES write fails —
      // the original version leaked the context on any exception.
      sc.stop()
    }
  }
}
