package com.lkr.flink.transform

import com.alibaba.fastjson.JSON
import com.hhy.flink.ReloadData
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.windowing.assigners.SlidingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

import java.util.Properties


/**
 * Flink streaming job: consumes JSON arrays from the Kafka topic "trap_gz",
 * explodes each array into individual records, extracts
 * (active_time, wayside_mac, carriage_ceiling_mac), enriches via ReloadData,
 * and counts records per wayside_mac over a 1-minute sliding window that
 * advances every 10 seconds.
 */
object KafKaStreamingWindows {

  def main(args: Array[String]): Unit = {
    val KAFKA_BROKER = "10.254.32.6:9092,10.254.32.7:9092,10.254.32.8:9092"

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Checkpoint every 5 seconds so Kafka offsets are committed consistently.
    env.enableCheckpointing(5000)

    val properties = new Properties()
    properties.setProperty("bootstrap.servers", KAFKA_BROKER)
    properties.setProperty("group.id", "flinkTest")

    val stream = env.addSource(
      new FlinkKafkaConsumer[String]("trap_gz", new SimpleStringSchema(), properties))

    val data = stream
      // Each Kafka message is a JSON array; flatten it into one element per record.
      .map(x => JSON.parseArray(x))
      .flatMap(x => x.toArray())
      .map(x => {
        val node = JSON.parseObject(x.toString)
        (node.getString("active_time"),
         node.getString("wayside_mac"),
         node.getString("carriage_ceiling_mac"))
      })
      .map(new ReloadData())

    // Count occurrences per wayside_mac (tuple field _2 of the enriched record).
    // NOTE: the aggregation must be sum(1) — field 1 is the Int count; field 0
    // is the String key, which Flink cannot sum (runtime exception).
    val data1 = data
      .map(x => (x._2, 1))
      .keyBy(0)
      .timeWindow(Time.minutes(1), Time.seconds(10))
      .sum(1)

    data1.print()

    env.execute()
  }

}
