package org.apache.flink

import java.util.Properties

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.streaming.util.serialization.{JSONKeyValueDeserializationSchema, SimpleStringSchema}
import org.apache.flink.api.scala._
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode
import org.apache.flink.streaming.api.TimeCharacteristic

object BasicApp1 {

    /**
     * Entry point: consumes Canal change-event JSON from the Kafka topic
     * `canal_test`, keeps only the rows accepted by [[TableFilter]], maps each
     * event's first `data` element into a [[student]], and prints the results
     * to stderr.
     */
    def main(args: Array[String]): Unit = {

        val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

        // Kafka consumer configuration.
        val kafkaProps = new Properties()
        kafkaProps.setProperty("bootstrap.servers", "hadoop000:9092")
        kafkaProps.setProperty("group.id", "test")

        // `true` is passed straight to JSONKeyValueDeserializationSchema's
        // includeMetadata flag — presumably so topic/partition/offset are
        // available on the record; TODO confirm against the filter's needs.
        val consumer = new FlinkKafkaConsumer[ObjectNode](
            "canal_test",
            new JSONKeyValueDeserializationSchema(true),
            kafkaProps)

        env.addSource(consumer)
            .filter(new TableFilter)
            .map { record =>
                // Canal envelope: the row payload lives at value.data[0].
                // NOTE(review): assumes "value"/"data" are always present and
                // non-empty — a malformed event would NPE here; verify upstream.
                val row: JsonNode = record.get("value").get("data").get(0)
                student(row.get("id").asInt(), row.get("name").asText(), row.get("age").asInt())
            }
            .printToErr()

        env.execute("app2")
    }
}

case class student(id:Int, name:String, age:Int)