package org.es

import java.util

import org.FlinkStreamApp
import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink
import org.apache.http.HttpHost
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.client.Requests

/**
 * description ：demo job that writes a stream of strings into Elasticsearch (index "t01")
 * author      ：剧情再美终是戏
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/2/25 19:26
 * modified By ：
 * version:    : 1.0
 */
object EsWrite extends FlinkStreamApp {
  /**
   * Builds the job graph: a fixed three-element string source whose records
   * are wrapped as {"data": &lt;element&gt;} documents and indexed into the
   * Elasticsearch index "t01" (type "_doc").
   */
  override def doSomeThing(environment: StreamExecutionEnvironment) = {
    // Source: a fixed set of demo elements.
    val stream = environment.fromElements("1", "2", "3")

    // REST endpoints of the target Elasticsearch cluster.
    val hosts = new java.util.ArrayList[HttpHost]()
    Seq("hadoop101", "hadoop102", "hadoop103").foreach { hostname =>
      hosts.add(new HttpHost(hostname, 9200, "http"))
    }

    val sinkBuilder = new ElasticsearchSink.Builder[String](
      hosts,
      new ElasticsearchSinkFunction[String] {
        // Wrap one stream element into an IndexRequest targeting index "t01".
        private def toIndexRequest(element: String): IndexRequest = {
          val document = new util.HashMap[String, String]()
          document.put("data", element)

          Requests
            .indexRequest()
            .index("t01")
            .`type`("_doc")
            .source(document)
        }

        override def process(t: String, runtimeContext: RuntimeContext, requestIndexer: RequestIndexer) = {
          requestIndexer.add(toIndexRequest(t))
        }
      }
    )

    // Flush after every single action so each element is written immediately.
    // Handy for demos/tests; a larger batch size would suit production.
    sinkBuilder.setBulkFlushMaxActions(1)
    stream.addSink(sinkBuilder.build())
  }
}
