package com.eshipenet.shipnet.edpersistor

import java.util.concurrent.TimeUnit

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.influxdb.dto.Point
import org.influxdb.{InfluxDB, InfluxDBFactory}
import scala.collection.JavaConverters._

/** Flink sink that writes [[InfluxDBPoint]] records to an InfluxDB server.
  *
  * The connection is created in [[open]] and released in [[close]]; batching
  * and gzip compression are enabled according to the supplied `config`.
  *
  * @param config connection settings (URL, optional credentials, batching, gzip)
  */
class FlinkInfluxDBSink(config: InfluxDBConfig) extends RichSinkFunction[InfluxDBPoint] {

  // Assigned in open(); stays null until the sink has been opened.
  private var influxDBClient: InfluxDB = _

  /** Connects to InfluxDB and applies batching/gzip settings.
    *
    * @param parameters Flink runtime configuration (passed through to super)
    */
  override def open(parameters: Configuration): Unit = {
    super.open(parameters)

    // Authenticate only when both username and password are present and non-empty.
    val hasCredentials =
      config.username.exists(_.nonEmpty) && config.password.exists(_.nonEmpty)

    influxDBClient =
      if (hasCredentials) {
        InfluxDBFactory.connect(config.url, config.username.get, config.password.get)
      } else {
        InfluxDBFactory.connect(config.url)
      }

    // NOTE(review): database selection is intentionally left disabled here;
    // each point carries its own database in invoke() — confirm this is intended.
//    influxDBClient.setDatabase(config.database)

    if (config.batchActions > 0) {
      // Flush interval in milliseconds: the previous toNanos.toInt overflowed
      // Int for durations above ~2.1 s (Int.MaxValue nanos), producing a wrong
      // (possibly negative) interval. Milliseconds keep the full useful range.
      influxDBClient.enableBatch(config.batchActions,
                                 config.flushDuration.toMillis.toInt,
                                 TimeUnit.MILLISECONDS)
    }

    if (config.isEnableGzip) {
      // Side-effecting 0-arity call: keep the parentheses.
      influxDBClient.enableGzip()
    }

  }

  /** Writes a single point, using the database and retention policy carried
    * by the point itself.
    */
  override def invoke(point: InfluxDBPoint, context: SinkFunction.Context[_]): Unit = {
    influxDBClient.write(point.database, point.rp, point.point)
  }

  /** Flushes any pending batched writes and closes the connection.
    *
    * Guarded against a null client: Flink may call close() even when open()
    * never ran or failed before the client was assigned.
    */
  override def close(): Unit = {
    if (influxDBClient != null) {
      if (influxDBClient.isBatchEnabled) {
        influxDBClient.disableBatch()
      }
      influxDBClient.close()
    }
  }

}
