package com.eshipenet.shipnet.dp_decompressor

import java.util.Properties

import org.apache.avro.generic.GenericRecord
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.json4s.JsonAST.JObject

import scala.util.Try

object EdgeChannelDataPacketStreamProvider {

  /** Builds the decompressor's input stream: consumes raw Kafka messages from the
    * `EdgeChannelDataPacket1` topic, parses each into an [[EdgeChannelDataPacket]],
    * and returns only the successfully parsed packets.
    *
    * @param env the Flink execution environment the Kafka source is registered on
    * @return a stream of successfully parsed [[EdgeChannelDataPacket]]s
    */
  def create(env: StreamExecutionEnvironment): DataStream[EdgeChannelDataPacket] = {
    val properties = new Properties()
    // NOTE(review): broker address and group id are hard-coded — consider
    // externalizing to configuration.
    properties.setProperty("bootstrap.servers", "kafka-server:9092")
    properties.setProperty("group.id", "dp_decompressor.EdgeChannelDataPacketStream")

    val deserializer = new EdgeChannelDataPacketKafkaDeserializer
    val consumer =
      new FlinkKafkaConsumer[EdgeChannelDataPacketKafkaMessage]("EdgeChannelDataPacket1", deserializer, properties)
    // Start from the latest offset: only packets arriving after (re)start are processed.
    consumer.setStartFromLatest()

    val parsed = env
      .addSource(consumer)
      .map(msg => EdgeChannelDataPacket.parseMsg(msg))

    // Fix: previously every record — success or failure — was printed under the
    // "err" label; only failed parses belong in the error log.
    parsed
      .filter(_.isFailure)
      .print("err")

    parsed
      .filter(_.isSuccess)
      .map(_.get)
  }

  /** Debug/stub helper: inspects the first field of an Avro record, prints it,
    * and yields a placeholder message. It does NOT decode the record yet —
    * presumably a scaffold for a future Avro-based parse path; confirm before use.
    *
    * @param record the Avro record whose first field is inspected
    * @return a `Success` holding a placeholder message, or a `Failure` if
    *         accessing field 0 throws (e.g. the record has no fields)
    */
  def parseGenericRecord(record: GenericRecord): Try[EdgeChannelDataPacketKafkaMessage] =
    Try {
      val v = record.get(0)
      println(s"v = $v")
      EdgeChannelDataPacketKafkaMessage("", "{}")
    }

  /** Builds a small in-memory stream of fixture packets for local testing,
    * bypassing Kafka entirely.
    *
    * @param env the Flink execution environment the collection source is registered on
    * @return a finite stream containing two placeholder packets (edges EB01 and EB02)
    */
  def createTest(env: StreamExecutionEnvironment): DataStream[EdgeChannelDataPacket] = {
    val fixtures = List("EB01", "EB02").map { edgeId =>
      EdgeChannelDataPacket(edgeId, "{}", "NONE", None, JObject())
    }
    env.fromCollection(fixtures)
  }

}
