package com.kay.es

import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark._
/**
  * Created by Administrator on 2018/6/12.
  */
// NOTE(review): name kept lowercase for compatibility with existing
// spark-submit invocations; Scala convention would be `SaveEs`.
object savees {

  /**
    * Entry point: writes a small sample dataset to the Elasticsearch
    * index `sparkes/info`, using the `khh` field as the document id
    * so repeated runs upsert rather than duplicate documents.
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("spark_savees")
      .set("es.index.auto.create", "true") // create the index if it does not exist
      .set("es.nodes", "localhost")
      .set("es.port", "9200")

    val sc: SparkContext = new SparkContext(conf)
    try {
      val p1 = Map("khh" -> 100100101, "name" -> "zhangsan2", "age" -> 20)
//    val p2 = Map("khh" -> 1001002,"name"->"lisi","age" -> 30)
//    val p3 = Map("khh" -> 1001003,"name"->"wangwu","age" -> 40)
      val rdd = sc.makeRDD(Seq(p1 /*,p2,p3*/ ))

      // Debug output; println runs on the executors, which is only
      // visible in the driver console because master is local[2].
      rdd.foreach(println(_))

      // es.mapping.id maps the `khh` value to the ES document _id.
      rdd.saveToEs("sparkes/info", Map("es.mapping.id" -> "khh"))
    } finally {
      // Always release the SparkContext, even if the save fails,
      // so the local cluster resources are not leaked.
      sc.stop()
    }
  }
}
