package com.huan

import org.apache.http.HttpHost
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.elasticsearch.action.index.{IndexRequest, IndexResponse}
import org.elasticsearch.client.{RequestOptions, RestClient, RestHighLevelClient}
import org.elasticsearch.common.xcontent.XContentType

object SparkStreamingESTest {

  /** Spark Streaming job: reads whitespace-separated "id value" lines from a
    * local TCP socket in 3-second batches and indexes each line as a document
    * into the Elasticsearch "product" index (doc id = first token, payload =
    * second token).
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName(this.getClass.getSimpleName)
    val scc = new StreamingContext(conf, Seconds(3))

    // Data source: raw text socket.
    // NOTE(review): the original hard-coded port 9200 here, which is also the
    // Elasticsearch HTTP port used below — this looks like a copy-paste slip
    // (netcat test sources conventionally use e.g. 9999). The default is kept
    // for backward compatibility, but it can now be overridden via args(0).
    val socketPort = if (args.nonEmpty) args(0).toInt else 9200
    val ds = scc.socketTextStream("localhost", socketPort)

    ds.foreachRDD { rdd =>
      // Build ONE client per partition, not one per record: RestHighLevelClient
      // is expensive to construct and is not serializable, so it must be created
      // on the executor side — foreachPartition is the standard pattern for this.
      rdd.foreachPartition { records =>
        val client = new RestHighLevelClient(
          RestClient.builder(new HttpHost("localhost", 9200, "http"))
        )
        try {
          records.foreach { data =>
            // Split on a single space: token 0 is the doc id, token 1 the value.
            val fields = data.split(" ")
            if (fields.length >= 2) {
              val request = new IndexRequest()
              request.index("product").id(fields(0))

              // JSON body for the document. The original interpolated a stray
              // trailing space into the value ("${ss(1)} ") — fixed here.
              val json =
                s"""
                   | { "data" : "${fields(1)}"}
                   |""".stripMargin

              request.source(json, XContentType.JSON)

              val response: IndexResponse = client.index(request, RequestOptions.DEFAULT)
              println(response.getResult)
            } else {
              // Guard: a line without two tokens (e.g. a blank line from netcat)
              // previously crashed the batch with ArrayIndexOutOfBoundsException.
              println(s"skipping malformed record: '$data'")
            }
          }
        } finally {
          // Always release the client's HTTP connection pool, even if an
          // index call throws — the original leaked it on failure.
          client.close()
        }
      }
    }

    // Start the streaming job and block until it is terminated.
    scc.start()
    scc.awaitTermination()
  }
}
