package com.qyer.commons.kafka;

import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

import com.qyer.commons.concurrent.GenericLinkedBlockingQueue;
import com.qyer.commons.exception.KafkaOperationException;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

/**
 * A queue-draining Kafka producer: continuously takes records of type {@code T} from an in-memory
 * blocking queue and publishes them synchronously to a single Kafka topic, until a poison-pill
 * record is taken or the thread is interrupted. Optional hooks fire before and after each send.
 *
 * <p>User: Z J Wu Date: 2016/07/22 Time: 14:13 Package: com.qyer.commons.kafka
 */
public class GenericKafkaProducer<T extends SelfSerializableRecord<T>, R1, R2> extends
  BasicQueueOperationComponent<T, R1, R2> implements Runnable {

  /**
   * Fluent builder for {@link GenericKafkaProducer}. Name, topic, configuration path, put
   * timeout, input queue and exit signal are all mandatory; the two hooks are optional.
   */
  public static class Builder<T extends SelfSerializableRecord<T>, R1, R2> {

    private String name;
    private String topic;

    private long putTimeout;

    private GenericLinkedBlockingQueue<T> inQueue;

    private CountDownLatch exitSignal;

    private String conf;

    private OperationHook<T, R1> beforeSendHook;
    private OperationHook<T, R2> afterSendHook;

    public Builder() {
    }

    /** Sets the logical producer name, used in log messages. */
    public Builder<T, R1, R2> name(String name) {
      this.name = name;
      return this;
    }

    /** Sets the destination Kafka topic. */
    public Builder<T, R1, R2> topic(String topic) {
      this.topic = topic;
      return this;
    }

    /**
     * Sets the classpath location of the Kafka producer properties file.
     * Blank or whitespace-only input is normalized to {@code null} so that
     * {@link #build()} rejects it.
     */
    public Builder<T, R1, R2> config(String conf) {
      this.conf = StringUtils.trimToNull(conf);
      return this;
    }

    /** Sets how long (in milliseconds) a synchronous send may block before failing. */
    public Builder<T, R1, R2> putTimeout(long putTimeout) {
      this.putTimeout = putTimeout;
      return this;
    }

    /** Sets the blocking queue the producer drains records from. */
    public Builder<T, R1, R2> inQueue(GenericLinkedBlockingQueue<T> inQueue) {
      this.inQueue = inQueue;
      return this;
    }

    /** Sets the latch counted down exactly once when the producer thread exits. */
    public Builder<T, R1, R2> exitSignal(CountDownLatch exitSignal) {
      this.exitSignal = exitSignal;
      return this;
    }

    /** Registers an optional hook invoked immediately before each send. */
    public Builder<T, R1, R2> registerBeforeSendHook(OperationHook<T, R1> beforeSendHook) {
      this.beforeSendHook = beforeSendHook;
      return this;
    }

    /** Registers an optional hook invoked after each successful send. */
    public Builder<T, R1, R2> registerAfterSendHook(OperationHook<T, R2> afterSendHook) {
      this.afterSendHook = afterSendHook;
      return this;
    }

    /**
     * Validates all mandatory settings and creates the producer.
     *
     * @throws IllegalArgumentException if any mandatory setting is missing or invalid, or if the
     *                                  configuration resource cannot be found or loaded
     */
    public GenericKafkaProducer<T, R1, R2> build() {
      checkArgument(isNotBlank(name), "Producer name is blank.");
      checkArgument(isNotBlank(topic), "Producer topic is blank.");
      checkArgument(isNotBlank(conf), "Producer configuration file path is blank.");
      checkArgument(putTimeout > 0, "Producer put timeout is not greater than 0.");
      checkArgument(inQueue != null, "Input queue is null.");
      checkArgument(exitSignal != null, "Exit signal is null.");
      GenericKafkaProducer<T, R1, R2> producer = new GenericKafkaProducer<>(name, topic, inQueue,
                                                                            putTimeout, conf,
                                                                            exitSignal);
      producer.setBeforeOperationHook(beforeSendHook);
      producer.setAfterOperationHook(afterSendHook);
      return producer;
    }

  }

  private static final Logger LOGGER = LoggerFactory.getLogger(GenericKafkaProducer.class);

  // All state is assigned exactly once in the constructor; final makes that explicit and
  // guarantees safe publication to the thread that later executes run().
  private final String name;

  private final Producer<String, T> producer;

  private final String topic;

  private final long putTimeout;

  private final GenericLinkedBlockingQueue<T> inQueue;

  private final CountDownLatch exitSignal;

  private GenericKafkaProducer(String name, String topic, GenericLinkedBlockingQueue<T> inQueue,
                               long putTimeout, String confPath, CountDownLatch exitSignal) {
    this.name = name;
    this.topic = topic;
    this.inQueue = inQueue;
    this.putTimeout = putTimeout;
    this.exitSignal = exitSignal;
    Properties props = new Properties();
    // try-with-resources: the stream was previously never closed (leak on every construction),
    // and a missing resource caused a bare NPE from props.load(null).
    try (InputStream is = this.getClass().getClassLoader().getResourceAsStream(confPath)) {
      if (is == null) {
        throw new IllegalArgumentException(
          "Kafka producer configuration not found on classpath: " + confPath);
      }
      props.load(is);
    } catch (IOException e) {
      throw new IllegalArgumentException(e);
    }
    this.producer = new KafkaProducer<>(props);
  }

  /**
   * Sends one record synchronously, stamping its enqueue time and firing the before/after hooks.
   * Blocks for at most {@code putTimeout} milliseconds waiting for the broker acknowledgement.
   *
   * @param t the record to publish; its key comes from {@code t.toKey()}
   * @return the broker-assigned metadata for the appended record
   * @throws KafkaOperationException if the send fails, times out, or the thread is interrupted
   */
  public RecordMetadata send(T t) throws KafkaOperationException {
    t.getSummary().setEnqueueTime(System.currentTimeMillis());
    // Parameterized record type (was a raw ProducerRecord, losing type safety).
    ProducerRecord<String, T> pr = new ProducerRecord<>(topic, t.toKey(), t);
    beforeOperation(t);
    Future<RecordMetadata> future = producer.send(pr);
    try {
      RecordMetadata response = future.get(putTimeout, TimeUnit.MILLISECONDS);
      afterOperation(t);
      return response;
    } catch (InterruptedException e) {
      // Restore the interrupt flag so the run() loop (or any caller) can observe it.
      Thread.currentThread().interrupt();
      throw new KafkaOperationException(e);
    } catch (Exception e) {
      throw new KafkaOperationException(e);
    }
  }

  /** Closes the underlying Kafka producer, flushing buffered records. Safe to call repeatedly. */
  public void shutdown() {
    if (producer != null) {
      producer.close();
    }
  }

  /**
   * Drains the input queue, sending each record until a poison-pill record arrives or the thread
   * is interrupted. Failed sends are logged and skipped; the loop keeps running. Always closes
   * the producer and counts down the exit signal on the way out.
   */
  @Override
  public void run() {
    try {
      while (true) {
        T t = inQueue.take();
        if (t == null) {
          continue;
        }
        if (t.isPill()) {
          // Poison pill: orderly shutdown requested by the upstream component.
          break;
        }
        try {
          send(t);
        } catch (KafkaOperationException e) {
          // Parameterized logging (was string concatenation, inconsistent with the other logs).
          LOGGER.warn("KafkaProducer({}) Cannot send record to kafka cluster.", name, e);
        }
      }
      LOGGER.info("KafkaProducer({}) stopped.", name);
    } catch (InterruptedException e) {
      // Restore the flag so owners of this thread can still see the interruption.
      Thread.currentThread().interrupt();
      LOGGER.warn("KafkaProducer({}) interrupted.", name);
    } finally {
      shutdown();
      exitSignal.countDown();
    }
  }

  public String getName() {
    return name;
  }

  public String getTopic() {
    return topic;
  }

  public long getPutTimeout() {
    return putTimeout;
  }
}
