package com.eshipenet.shipnet.dp_unpacker

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.{DefaultFormats, Formats}
import org.json4s.native.JsonMethods

import scala.util.Try

object EdgeChannelDataPacketStreamProvider {

  implicit lazy val formats: Formats = DefaultFormats

  /** Top-level JSON fields consumed into the packet itself; all others become aux payload. */
  private val pFields = Set("id", "data")

  // Defaults match the previously hard-coded values, so existing callers are unaffected.
  private val DefaultBootstrapServers = "kafka-server:9092"
  private val DefaultGroupId          = "dp_decoder.EdgeChannelDataPacketUncompressedStream"
  private val DefaultTopic            = "EdgeChannelDataPacketDecoded2"

  /**
   * Builds a stream of successfully parsed [[EdgeChannelDataPacket]]s from Kafka.
   *
   * Messages that fail to parse are dropped after passing through the debug print sink.
   *
   * @param env              Flink execution environment to attach the source to
   * @param bootstrapServers Kafka bootstrap servers (defaults to the original hard-coded value)
   * @param groupId          Kafka consumer group id (defaults to the original hard-coded value)
   * @param topic            Kafka topic to consume (defaults to the original hard-coded value)
   * @return stream of packets parsed from consumed messages
   */
  def create(
      env: StreamExecutionEnvironment,
      bootstrapServers: String = DefaultBootstrapServers,
      groupId: String = DefaultGroupId,
      topic: String = DefaultTopic
  ): DataStream[EdgeChannelDataPacket] = {
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", bootstrapServers)
    properties.setProperty("group.id", groupId)
    val deserializer = new SimpleStringSchema()
    val consumer     = new FlinkKafkaConsumer[String](topic, deserializer, properties)
    // Ignore committed offsets and start from the latest record on (re)start.
    consumer.setStartFromLatest()
    val stream = env.addSource(consumer).map(msg => parseMsg(msg))
    // NOTE(review): debug sink kept to preserve behavior; consider removing for production.
    stream.print("stream")
    // Failed parses are silently dropped here; .get is safe after the isSuccess filter.
    stream.filter(_.isSuccess).map(_.get)
  }

  /**
   * Parses one raw Kafka message into an [[EdgeChannelDataPacket]].
   *
   * Any exception — malformed JSON, missing/ill-typed "id" or "data", or a failed
   * per-element [[EdgeChannelData]] parse — is captured by the enclosing Try and
   * surfaces as a Failure.
   *
   * @param msg raw JSON message body
   * @return Success(packet) or Failure with the first error encountered
   */
  def parseMsg(msg: String): Try[EdgeChannelDataPacket] = Try {
    val json = JsonMethods.parse(msg)
    val id   = (json \ "id").extract[String]
    // .get re-throws element parse failures directly; the enclosing Try converts
    // them to Failure, so the former inner Try(...).get round-trip was redundant.
    val data = (json \ "data")
      .extract[Seq[JValue]]
      .map(d => EdgeChannelData.parseJSON(d).get)
    // Preserve every top-level field other than "id"/"data" as auxiliary data.
    val aux = JObject(json.filterField(f => !pFields.contains(f._1)))
    EdgeChannelDataPacket(id, data, aux)
  }

}
