package com.zhao.demo.unbound.demo05_sink.es

import com.zhao.demo.Raytek
import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink
import org.apache.http.HttpHost
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.client.Requests

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._

/**
 * Description: Elasticsearch sink demo.<br/>
 * Copyright (c) 2020, 赵<br/>
 * This program is protected by copyright laws.<br/>
 * Date: 2020/12/10 13:40
 *
 * @author 柒柒
 * @version 1.0
 */

object ESSinkDemo {

  def main(args: Array[String]): Unit = {

    // 1. Streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Required for the implicit TypeInformation instances of the Scala DataStream API.
    import org.apache.flink.api.scala._

    // 2. Read the socket source, parse each CSV line into a Raytek record,
    //    and keep only travellers whose body temperature is abnormal.
    //    Expected CSV layout: id,temperature,name,timestamp,location
    val result: DataStream[Raytek] = env.socketTextStream("NODE01", 7777)
      .filter(_.trim.nonEmpty)
      .map { perLine =>
        val arr = perLine.split(",")
        val id: String = arr(0).trim
        val temperature: Double = arr(1).trim.toDouble
        val name: String = arr(2).trim
        val timestamp: Long = arr(3).trim.toLong
        val location: String = arr(4).trim
        Raytek(id, temperature, name, timestamp, location)
      }
      // Normal range is [36.3, 37.2] °C; anything outside is flagged as abnormal.
      .filter(traveller => traveller.temperature < 36.3 || traveller.temperature > 37.2)

    // 3. Sink the abnormal records to Elasticsearch.

    // i) Connection endpoints of the remote ES cluster. The Builder expects a
    //    java.util.List, so convert explicitly with .asJava (JavaConverters)
    //    instead of relying on the deprecated JavaConversions implicits.
    val httpHosts: java.util.List[HttpHost] = List(
      new HttpHost("NODE01", 9200),
      new HttpHost("NODE02", 9200),
      new HttpHost("NODE03", 9200)
    ).asJava

    // ii) The ElasticsearchSinkFunction implementation that writes each record.
    val elasticsearchSinkFunction: MyESSinkFunction = new MyESSinkFunction()

    // iii) Build the sink.
    val builder: ElasticsearchSink.Builder[Raytek] =
      new ElasticsearchSink.Builder[Raytek](httpHosts, elasticsearchSinkFunction)

    // Flush after every single action: with the default bulk size a small
    // stream may never fill a batch, and nothing would ever reach ES.
    builder.setBulkFlushMaxActions(1)

    // iv) Materialise the sink and attach it to the stream.
    val esSink: ElasticsearchSink[Raytek] = builder.build()
    result.addSink(esSink)

    // 4. Launch the job.
    env.execute(this.getClass.getSimpleName)
  }

  /**
   * ElasticsearchSinkFunction implementation that indexes every abnormal
   * traveller record into the "raytek" index (type "traveller").
   */
  class MyESSinkFunction extends ElasticsearchSinkFunction[Raytek] {

    /**
     * Invoked once for every element flowing through the stream.
     *
     * @param element the abnormal traveller record to index
     * @param ctx     runtime context of the sink operator
     * @param indexer buffer collecting the requests sent to ES
     */
    override def process(element: Raytek, ctx: RuntimeContext, indexer: RequestIndexer): Unit = {
      println(s"->待处理的体温异常的旅客信息是: $element")

      // i) Flatten the Raytek record into a String->String map.
      //    Fields: id, temperature, name, timestamp, location.
      val scalaMap: Map[String, String] = Map[String, String](
        "id" -> element.id.trim,
        "temperature" -> element.temperature.toString.trim,
        "name" -> element.name.trim,
        "timestamp" -> element.timestamp.toString.trim,
        // FIX: was `element.Location` — the Raytek field is lowercase
        // `location`, as constructed in main above.
        "location" -> element.location.trim
      )

      // Explicit conversion to java.util.Map (IndexRequest.source expects one).
      val javaMap: java.util.Map[String, String] = scalaMap.asJava

      // ii) Build the index request.
      val indexRequest: IndexRequest = Requests.indexRequest()
        .index("raytek")
        .`type`("traveller")
        .id(s"${element.id.trim} ->${element.name.trim}")
        .source(javaMap)

      // iii) Hand the request to the bulk processor.
      indexer.add(indexRequest)
    }
  }

}









