package spark.spark2es

import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark._

/**
  *
  * @author com.eureka.wh   
  * @since 2019/6/29 8:28
  */
object Spark2Es01 {

  def main(args: Array[String]): Unit = {

    writeToES
  }

  /**
    * Builds a local[2] SparkContext pre-configured for the Elasticsearch
    * node "spark001:9200" with automatic index creation enabled.
    *
    * Extracted because the identical configuration was previously duplicated
    * in both `readFromEs` and `writeToES`.
    */
  private def newSparkContext(): SparkContext = {
    val conf = new SparkConf().setAppName("SparkToES").setMaster("local[2]")
    conf.set("es.nodes", "spark001")
    conf.set("es.port", "9200")
    conf.set("es.index.auto.create", "true")
    new SparkContext(conf)
  }

  /**
    * Reads the "spark/docs" index as an RDD and prints its document count.
    *
    * Side-effecting (network I/O + console output); explicit `: Unit` makes
    * that contract visible instead of relying on type inference.
    */
  def readFromEs: Unit = {
    val sc = newSparkContext()
    try {
      val ds = sc.esRDD("spark/docs")
      // runtime message kept byte-identical to the original
      println("读取es结果===>" + ds.count())
    } finally {
      // always release the context, even if the ES read fails
      sc.stop()
    }
  }

  /**
    * Writes two sample Map documents into the "spark/docs" index via
    * elasticsearch-hadoop's `saveToEs`.
    *
    * Name kept as-is (casing differs from `readFromEs`) to avoid breaking
    * external callers.
    */
  def writeToES: Unit = {
    val sc = newSparkContext()
    try {
      val numbers = Map("one" -> "1", "two" -> "2", "three" -> "3")
      val airports = Map("OTP" -> "xxx", "SFO" -> "yyy")
      sc.makeRDD(Seq(numbers, airports)).saveToEs("spark/docs")
    } finally {
      // guarantee cleanup even if the ES write throws
      sc.stop()
    }
  }

}
