package cn.wangjie.spark.store.es

import org.elasticsearch.spark._
import org.elasticsearch.spark.sql._
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.hadoop.conf.Configuration
import org.elasticsearch.hadoop.mr.EsInputFormat
import org.apache.hadoop.io.Text
import org.apache.hadoop.io.MapWritable

object EsMain {

  /**
   * Entry point: reads every document from the Elasticsearch index "myrow"
   * (via a match_all query) using a local Spark context, prints each
   * (documentId, fieldsMap) pair, then prints the total document count.
   *
   * Connection settings are hard-coded to a local ES node (127.0.0.1:9200).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("estest").setMaster("local[*]")

    //  conf.set("es.resource", "nm_1606/news") // name of the index to read (alternative to passing it to esRDD)
    conf.set("es.nodes", "127.0.0.1")
    conf.set("es.port", "9200")
    conf.set("es.query", " {  \"query\": {    \"match_all\": {    }  }}") // filter results with a query string

    val sc = new SparkContext(conf)
    try {
      val rdd = sc.esRDD("myrow")
      // Cache the RDD: two actions follow (foreach + count); without caching
      // the ES index would be scanned twice.
      rdd.cache()
      // Each element is (documentId, Map[String, AnyRef] of the document's fields).
      rdd.foreach { case (id, doc) => println(s"$id\n$doc") }
      println(rdd.count())
    } finally {
      // Fix: the SparkContext was never stopped, leaking the context and its
      // resources. Always release it, even if an action throws.
      sc.stop()
    }
  }
}