package com.eshipenet.shipnet.dp_decoder

import java.nio.charset.StandardCharsets
import java.util.Properties

import com.eshipenet.shipnet.dp_decoder.KafkaMessageDeserializer.pFields
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.native.JsonMethods
import org.json4s.{DefaultFormats, Formats}

import scala.util.Try

/**
 * Provides a Flink [[DataStream]] of decoded edge-channel data packets
 * consumed from Kafka. Messages that fail to parse are dropped rather than
 * failing the job.
 */
object EdgeChannelDataPacketStreamProvider {

  implicit lazy val formats: Formats = DefaultFormats

  // Previously hard-coded connection settings, kept as defaults so existing
  // callers (`create(env)`) are unaffected while tests/other environments
  // can override them.
  private val DefaultBootstrapServers = "kafka-server:9092"
  private val DefaultGroupId          = "dp_decoder.EdgeChannelDataPacketUncompressedStream"
  private val DefaultTopic            = "EdgeChannelDataPacketUncompressed2"

  /**
   * Attaches a Kafka source to `env` and returns a stream of successfully
   * parsed messages.
   *
   * @param env              Flink execution environment to attach the source to
   * @param bootstrapServers Kafka `bootstrap.servers` (defaults to the original value)
   * @param groupId          Kafka consumer `group.id` (defaults to the original value)
   * @param topic            Kafka topic to consume (defaults to the original value)
   * @return stream of parsed [[EdgeChannelDataPacketUncompressedKafkaMessage]]s;
   *         unparseable messages are silently discarded
   */
  def create(
      env: StreamExecutionEnvironment,
      bootstrapServers: String = DefaultBootstrapServers,
      groupId: String = DefaultGroupId,
      topic: String = DefaultTopic
  ): DataStream[EdgeChannelDataPacketUncompressedKafkaMessage] = {
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", bootstrapServers)
    properties.setProperty("group.id", groupId)
    val consumer =
      new FlinkKafkaConsumer[String](
        topic,
        new SimpleStringSchema(),
        properties
      )
    // Only live data matters for this pipeline: ignore any committed offsets
    // and start from the latest records.
    consumer.setStartFromLatest()
    // `Try.toOption` drops failures without calling `.get`; malformed
    // messages must not crash the job (same behavior as the previous
    // filter(_.isSuccess).map(_.get), written safely).
    env.addSource(consumer).flatMap(msg => parseMsg(msg).toOption)
  }

  /**
   * Parses one raw Kafka JSON message into an
   * [[EdgeChannelDataPacketUncompressedKafkaMessage]].
   *
   * Expected top-level fields: "id", "encoder", optional "encoderParams",
   * and "data". Any remaining fields (those not listed in `pFields`) are
   * collected into the auxiliary [[JObject]].
   *
   * @param msg raw JSON message body from Kafka
   * @return `Success(message)`, or `Failure` wrapping the parse/extract error
   */
  def parseMsg(msg: String): Try[EdgeChannelDataPacketUncompressedKafkaMessage] = Try {
    val json    = JsonMethods.parse(msg)
    val id      = (json \ "id").extract[String]
    val encoder = (json \ "encoder").extract[String]
    val params  = (json \ "encoderParams").extract[Option[JValue]]
    // Everything outside the known primary fields is kept as auxiliary metadata.
    val aux     = JObject(json.filterField(f => !pFields.contains(f._1)))
    // Bug fix: the parameterless getBytes() uses the JVM's platform-default
    // charset; pin UTF-8 so byte output is identical on every host.
    // (Identical to before for ASCII payloads, e.g. base64-encoded data.)
    val data    = (json \ "data").extract[String].getBytes(StandardCharsets.UTF_8)
    EdgeChannelDataPacketUncompressedKafkaMessage(id, data, encoder, params, aux)
  }

}
