package com.eshipenet.shipnet.edpersistor

import java.util.concurrent.TimeUnit

import org.apache.flink.streaming.api.scala._
import org.influxdb.dto.Point
import org.joda.time.DateTime

import scala.util.Try

object Main {

  /** Nanoseconds per second, used to build nanosecond-precision timestamps. */
  private val NanosPerSecond = 1000000000L

  /** Flink job entry point.
    *
    * Pipeline: read raw equipment messages, stamp each with a nanosecond
    * processing timestamp, parse them into `TSDBData` records, convert each
    * record to an InfluxDB point, and write the points to the sink.
    * Records or points that fail to build are silently dropped
    * (`filter(_.isSuccess)`).
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    val dataStream = EquipmentDataSourceProvider.create(env)

    val pointStream = dataStream
      .map { msg =>
        // Nanosecond-precision processing time; InfluxDB points below are
        // written with TimeUnit.NANOSECONDS, so the units must match.
        val now              = java.time.Instant.now()
        val processTimestamp = now.getEpochSecond * NanosPerSecond + now.getNano
        TSDBData.build(msg, processTimestamp)
      }
      // Drop messages that failed to parse; keep the successfully built batches.
      .filter(_.isSuccess)
      .map(_.get)
      // Each message may yield several TSDBData records; flatten them.
      .flatMap(v => v)
      .map(toPoint _)
      // Drop records that could not be converted to a point.
      .filter(_.isSuccess)
      .map(_.get)

    // Debug/inspection output alongside the real sink.
    pointStream.print("point")

    pointStream.addSink(EquipmentDataSinkProvider.create(env))

    // Use the fully-qualified class name as the Flink job name.
    env.execute(this.getClass.getName)
  }

  /** Converts a parsed [[TSDBData]] record into an InfluxDB point.
    *
    * @param data measurement name, timestamp (nanoseconds), tags and typed fields
    * @return `Success` with the point, or `Failure(IllegalArgumentException)`
    *         when the measurement name is blank
    */
  def toPoint(data: TSDBData): Try[InfluxDBPoint] = Try {
    if (data.measurement.trim.isEmpty) {
      // Invalid argument, not an unexpected failure — surfaced via Try as a Failure.
      throw new IllegalArgumentException("measurement must not be empty")
    }
    val builder = Point.measurement(data.measurement).time(data.timestamp, TimeUnit.NANOSECONDS)
    data.tags.foreach { case (key, value) => builder.tag(key, value) }
    // Dispatch on the field's concrete type so the point stores a typed value.
    data.fields.foreach {
      case (key, TSDBDataFieldInt(value))    => builder.addField(key, value)
      case (key, TSDBDataFieldFloat(value))  => builder.addField(key, value)
      case (key, TSDBDataFieldString(value)) => builder.addField(key, value)
      case (key, TSDBDataFieldBool(value))   => builder.addField(key, value)
    }
    // NOTE(review): database and retention policy are hard-coded — consider
    // moving "equipmenttest3" / "autogen" to configuration.
    InfluxDBPoint("equipmenttest3", "autogen", builder.build())
  }

}
