package com.study.spark.scala.es

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of reading from and writing to Elasticsearch with Spark,
 * using the elasticsearch-hadoop connector's RDD API.
 *
 * Reference: https://www.elastic.co/guide/en/elasticsearch/hadoop/current/spark.html
 *
 * @author stephen
 * @date 2019-08-07 17:28
 */
object SparkESDemo {

  /** Minimal document model written to / read from ES. */
  case class Trip(departure: String, arrival: String)

  def main(args: Array[String]): Unit = {

    // Run locally on 2 threads; es-hadoop connector settings ride on the SparkConf.
    val conf = new SparkConf().setAppName("Spark2ES").setMaster("local[2]")
    conf.set("es.nodes", "localhost")
    conf.set("es.port", "9200")
    conf.set("es.index.auto.create", "true") // auto-create the target index if missing

    val sc = new SparkContext(conf)

    // Brings the esRDD / saveToEs implicit enrichments into scope.
    import org.elasticsearch.spark._

    try {
      // Read documents from ES; each element is (docId, Map[field -> value]).
      //val readRdd = EsSpark.esRDD(sc,"test/_doc")
      val readRdd = sc.esRDD("test/_doc") // requires the implicit import above
      // NOTE(review): println on an RDD runs on executors; fine under local[2],
      // but on a real cluster the output lands in executor logs, not the driver.
      readRdd.foreach(println(_))

      // Write a small RDD of case-class documents to ES.
      val upcomingTrip = Trip("OTP", "SFO")
      val lastWeekTrip = Trip("MUC", "OTP")

      val rdd = sc.makeRDD(Seq(upcomingTrip, lastWeekTrip))
      //EsSpark.saveToEs(rdd, "spark/docs")
      rdd.saveToEs("spark/_doc")
    } finally {
      // Fix: the original never stopped the SparkContext, leaking the local
      // scheduler threads, UI port, and temp dirs. Always stop it on exit.
      sc.stop()
    }
  }
}
