package com.lkr.flink.transform

import com.alibaba.fastjson.JSON
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

import java.util
import java.util.Properties
import com.hhy.flink.ReloadData
import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch2.ElasticsearchSink
import org.elasticsearch.client.Requests

import java.time.LocalDate
import org.apache.http.HttpHost

import java.net.{InetAddress, InetSocketAddress}



object KafKaStreamingToEs {

  /**
   * Flink streaming job: Kafka → transform → Elasticsearch.
   *
   * Consumes JSON-array messages from the Kafka topic "trap_gz", flattens each
   * array into individual JSON objects, projects the fields `active_time`,
   * `wayside_mac` and `carriage_ceiling_mac`, enriches the tuples via
   * [[ReloadData]] (semantics defined elsewhere — presumably a lookup/reload
   * step; confirm against its implementation), and indexes each record into
   * the Elasticsearch index "test_trap_flink" (type "flink").
   */
  def main(args: Array[String]): Unit = {
    val KAFKA_BROKER = "10.254.32.6:9092,10.254.32.7:9092,10.254.32.8:9092"

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Checkpoint every 5s so Kafka offsets are committed consistently with state.
    env.enableCheckpointing(5000)

    val properties = new Properties()
    properties.setProperty("bootstrap.servers", KAFKA_BROKER)
    // NOTE: "zookeeper.connect" is a legacy (pre-0.9) Kafka consumer setting and
    // is not needed by FlinkKafkaConsumer; the old commented line was dropped.
    properties.setProperty("group.id", "flinkTest")

    val stream = env.addSource(
      new FlinkKafkaConsumer[String]("trap_gz", new SimpleStringSchema(), properties))

    // Each Kafka record is a JSON array; flatten it into individual objects,
    // extract the three fields of interest, enrich with ReloadData, then
    // serialize as a comma-separated line for the sink.
    val data = stream
      .map(x => JSON.parseArray(x))
      .flatMap(arr => arr.toArray())
      .map { elem =>
        val node = JSON.parseObject(elem.toString)
        (node.getString("active_time"),
          node.getString("wayside_mac"),
          node.getString("carriage_ceiling_mac"))
      }
      .map(new ReloadData())
      .map(t => s"${t._1},${t._2},${t._3}")

    // Sink function: turn one "a_time,w_mac,c_mac" line into an index request.
    val myEsSinkFunc = new ElasticsearchSinkFunction[String] {
      override def process(t: String, runtimeContext: RuntimeContext, requestIndexer: RequestIndexer): Unit = {
        // Split once (the original re-split the same string three times).
        // limit = -1 keeps trailing empty fields, so an empty c_mac no longer
        // causes an ArrayIndexOutOfBoundsException at fields(2).
        val fields = t.split(",", -1)

        // Document payload for Elasticsearch.
        val dataSource = new util.HashMap[String, String]()
        dataSource.put("a_time", fields(0))
        dataSource.put("w_mac", fields(1))
        dataSource.put("c_mac", fields(2))

        // Build the index request: target index, payload, and (ES2) mapping type.
        val indexRequest = Requests.indexRequest()
        indexRequest.index("test_trap_flink")
          .source(dataSource).`type`("flink")

        requestIndexer.add(indexRequest)
      }
    }

    // Elasticsearch 2.x transport-client sink configuration.
    val config = new util.HashMap[String, String]
    // Flush after every single document: lowest latency, lowest throughput.
    config.put("bulk.flush.max.actions", "1")
    config.put("cluster.name", "nfyg")

    val transports = new util.ArrayList[InetSocketAddress]
    transports.add(new InetSocketAddress(InetAddress.getByName("10.254.32.204"), 9300))

    data.addSink(new ElasticsearchSink(config, transports, myEsSinkFunc))

    env.execute()
  }

}
