package com.lvmama.rhino.common.utils.kafka

import java.util.concurrent.atomic.AtomicReference

import org.apache.kafka.clients.producer.{Callback, ProducerRecord, RecordMetadata}
import org.apache.spark.TaskContext
import org.apache.spark.streaming.dstream.DStream

/**
  * Created by yuanxiaofeng on 2016/6/23.
  */
class KafkaDStreamSink(dstream: DStream[KafkaPayload]) {

  /**
    * Publishes every [[KafkaPayload]] in the wrapped DStream to the given Kafka topic.
    *
    * For each partition a cached producer is obtained (one per executor JVM — the
    * factory handles reuse), records are queued asynchronously, and the partition
    * task blocks until the broker has acknowledged every record so that failures
    * fail the Spark task instead of being lost.
    *
    * @param config Kafka producer configuration passed to the producer factory.
    * @param topic  destination Kafka topic.
    */
  def sendToKafka(config: Map[String, String], topic: String): Unit = {
    dstream.foreachRDD { rdd =>
      rdd.foreachPartition { records =>
        val producer = KafkaProducerFactory.getOrCreateProducer(config)

        val callback = new KafkaDStreamSinkExceptionHandler

        // Fail fast: before queueing each record, rethrow any failure already
        // reported by an earlier async callback. `toList` forces the lazy
        // iterator so every record is actually handed to the producer before
        // the blocking wait below. A `None` key is sent as a null Kafka key.
        val futures = records.map { record =>
          callback.throwExceptionIfAny()
          producer.send(new ProducerRecord(topic, record.key.orNull, record.value), callback)
        }.toList

        // Block until the broker acknowledges every record in this partition.
        futures.foreach(_.get())

        // Surface any failure reported by the trailing callbacks.
        callback.throwExceptionIfAny()
      }
    }
  }
}

object KafkaDStreamSink {
  import scala.language.implicitConversions

  /**
    * Implicit conversion that enriches a `DStream[KafkaPayload]` with the
    * `sendToKafka` method of [[KafkaDStreamSink]]. Bring it into scope with
    * `import KafkaDStreamSink._`.
    */
  implicit def createKafkaDStreamSink(dStream: DStream[KafkaPayload]): KafkaDStreamSink =
    new KafkaDStreamSink(dStream)
}

class KafkaDStreamSinkExceptionHandler extends Callback {

  // Most recent send failure, if any. Only ever set to a non-empty value by
  // onCompletion and cleared by throwExceptionIfAny.
  private val lastException = new AtomicReference[Option[Exception]](None)

  /**
    * Kafka producer callback: records the exception when an async send fails.
    *
    * Fix: the original stored `Option(exception)` unconditionally, so a
    * successful completion (exception == null) arriving after a failed one
    * reset the reference to `None` and silently lost the failure. Now only
    * non-null exceptions are recorded. Note that if several sends fail before
    * the next check, only the latest exception is kept.
    */
  override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit =
    if (exception != null) lastException.set(Some(exception))

  /** Rethrows (and atomically clears) the last recorded send failure, if any. */
  def throwExceptionIfAny(): Unit =
    lastException.getAndSet(None).foreach(ex => throw ex)
}
