package com.study.flink.scala.day02_xcy

import java.net.{InetAddress, InetSocketAddress}
import java.util

import cn.hutool.json.JSONUtil
import org.apache.flink.api.common.functions.{MapFunction, RuntimeContext}
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch5.ElasticsearchSink
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.client.Requests

/**
 * Flink streaming job: consumes alarm messages from a Kafka topic, parses each
 * record as JSON, and writes a fixed test document per record into an
 * Elasticsearch 5 index ("xcy_test").
 *
 * Event-time processing with custom watermarks (KafkaMessageWatermarksScala)
 * is enabled so late/out-of-order events can be handled downstream.
 */
object Main {
  def main(args: Array[String]): Unit = {
    import org.apache.flink.api.common.restartstrategy.RestartStrategies
    import org.apache.flink.api.java.utils.ParameterTool
    import org.apache.flink.streaming.api.TimeCharacteristic
    // Needed on Windows so Hadoop native utilities (winutils) can be located.
    System.setProperty("hadoop.home.dir", "F:\\document\\hadoop-2.7.3_32b_bin")

    // Hard-coded dev arguments; the real `args` parameter is intentionally ignored here.
    // NOTE(review): "winsdows.size" looks like a typo for "windows.size" — it may be read
    // elsewhere via the global job parameters, so the key is left unchanged. Confirm before fixing.
    val args2 = Array(
      "--input-topic", "GPS_808SERVER_ALARM",
      "--bootstrap.servers", "192.168.50.104:6667,192.168.50.105:6667",
      "--zookeeper.connect", "192.168.50.101",
      "--group.id", "myconsumer1",
      "--winsdows.size", "10"
    )

    val parameterTool = ParameterTool.fromArgs(args2)

    if (parameterTool.getNumberOfParameters < 5) {
      // Fixed: fragments previously concatenated without separating spaces,
      // producing e.g. "<topic>--bootstrap.servers".
      System.out.println("Missing parameters!\n" +
        "Usage: Kafka --input-topic <topic> " +
        "--bootstrap.servers <kafka brokers> " +
        "--zookeeper.connect <zk quorum> --group.id <some id>")
      return
    }

//    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Local environment with parallelism 5 for development/debugging.
    val env = StreamExecutionEnvironment.createLocalEnvironment(5)

    env.getConfig.disableSysoutLogging
    // Restart up to 4 times with a 10 s delay between attempts.
    env.getConfig.setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 10000))
    env.enableCheckpointing(5000) // create a checkpoint every 5 seconds

    env.getConfig.setGlobalJobParameters(parameterTool) // make parameters available in the web interface
    // Time semantics: EventTime = time embedded in the event;
    // ProcessingTime = wall-clock at the operator; IngestionTime = time of entry into Flink.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val flinkKafkaConsumer = new FlinkKafkaConsumer[String](
      parameterTool.getRequired("input-topic"),
      new KafkaMessageSchemaScala(),
      parameterTool.getProperties)

    // Elasticsearch sink configuration; bulk.flush.max.actions=1 flushes every record
    // (useful for debugging, costly in production).
    val es_conf: util.Map[String, String] = new util.HashMap[String, String]()
    es_conf.put("cluster.name", "HNS-ES")
    es_conf.put("bulk.flush.max.actions", "1")
    // Transport addresses of the ES 5.x cluster nodes.
    val es_access: java.util.List[InetSocketAddress] = new java.util.ArrayList[InetSocketAddress]()
    es_access.add(new InetSocketAddress(InetAddress.getByName("192.168.50.31"), 9300))
    es_access.add(new InetSocketAddress(InetAddress.getByName("192.168.50.31"), 9301))
    es_access.add(new InetSocketAddress(InetAddress.getByName("192.168.50.34"), 9305))
    es_access.add(new InetSocketAddress(InetAddress.getByName("192.168.50.34"), 9306))

    // Watermarks: in event-time mode, Flink uses watermarks to bound lateness/out-of-order
    // events. A watermark tells operators that no events with a smaller-or-equal timestamp
    // will arrive afterwards.
    val input = env.addSource(flinkKafkaConsumer.assignTimestampsAndWatermarks(new KafkaMessageWatermarksScala))
    input.map(new MapFunction[String, String] {
      // Parse the raw Kafka record as JSON and re-serialize it.
      // (A malformed record will throw here and trigger the restart strategy.)
      override def map(value: String): String = {
        println(value)
        val jo = JSONUtil.parseObj(value)
        // Removed dead call: `jo.getStr("id")` result was discarded.
        jo.toString
      }
    })
    .addSink(new ElasticsearchSink[String](es_conf, es_access, new ElasticsearchSinkFunction[String] {
      // Builds the index request for one element.
      // NOTE(review): `element` is currently ignored — the indexed document is a fixed
      // key1/key2 test payload; presumably a placeholder to be replaced with real fields.
      def createIndexRequest(element: String): IndexRequest = {
        val source: util.HashMap[String, Object] = new util.HashMap[String, Object]()
        source.put("key1", "val1")
        source.put("key2", "val2")
        Requests.indexRequest.index("xcy_test").`type`("xcy_test").source(source)
      }

      def process(element: String, ctx: RuntimeContext, indexer: RequestIndexer): Unit = {
        indexer.add(createIndexRequest(element))
      }
    }))

    try
      env.execute("myjob")
    catch {
      case e: Exception =>
        e.printStackTrace()
    }
  }
}
