
package cn.tang.tframe.mq.kafka.producer;

import cn.tang.tframe.common.base.MainConfigLoader;
import cn.tang.tframe.common.utils.lang.StringUtils;
import cn.tang.tframe.mq.IQtMessager;
import cn.tang.tframe.mq.QtMessager;
import cn.tang.tframe.mq.producer.IQtCallback;
import cn.tang.tframe.mq.producer.IQtProducer;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;

import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

/**
 * Created by tanghc on 17/2/4.
 */
/**
 * A delegating Kafka {@link Producer} that layers the project's
 * {@link IQtProducer} convenience API (message-id based send helpers with
 * automatic topic namespacing) on top of a wrapped producer instance.
 *
 * <p>All {@link Producer} interface methods forward unchanged to the wrapped
 * producer. Topics passed to the {@link IQtProducer} methods are prefixed
 * with the product code and app code from {@link MainConfigLoader} — see
 * {@link #wrapTopic(String)}.
 *
 * <p>Created by tanghc on 17/2/4.
 *
 * @param <M> payload type carried inside the {@link IQtMessager} envelope
 */
public class DelegateKafkaProducer<M extends Serializable>
	implements Producer<IQtMessager<M>, IQtMessager<M>>, IQtProducer {

	/** Underlying Kafka producer; every call delegates to it. Never null. */
	private final Producer<IQtMessager<M>, IQtMessager<M>> producer;

	/**
	 * Wraps the given Kafka producer.
	 *
	 * @param producer the producer all calls are delegated to
	 */
	public DelegateKafkaProducer(
		Producer<IQtMessager<M>, IQtMessager<M>> producer) {
		this.producer = producer;
	}

	@Override
	public Future<RecordMetadata> send(
		ProducerRecord<IQtMessager<M>, IQtMessager<M>> record) {

		return producer.send(record);
	}

	@Override
	public Future<RecordMetadata> send(
		ProducerRecord<IQtMessager<M>, IQtMessager<M>> record,
		Callback callback) {

		return producer.send(record, callback);
	}

	@Override
	public void flush() {

		producer.flush();
	}

	@Override
	public List<PartitionInfo> partitionsFor(String topic) {

		return producer.partitionsFor(topic);
	}

	@Override
	public Map<MetricName, ? extends Metric> metrics() {

		return producer.metrics();
	}

	@Override
	public void close() {

		producer.close();
	}

	@Override
	public void close(long timeout, TimeUnit unit) {

		producer.close(timeout, unit);
	}

	/**
	 * Sends the payload asynchronously and returns the generated message id
	 * immediately, without waiting for the broker acknowledgement.
	 *
	 * <p>Fix: the previous implementation blocked on {@code Future.get()} —
	 * making this "async" send fully synchronous — and swallowed any failure
	 * with {@code printStackTrace()}. Use
	 * {@link #sendSync(String, Serializable)} when delivery must be confirmed,
	 * or {@link #sendAsyn(String, Serializable, IQtCallback)} to observe the
	 * outcome without blocking.
	 *
	 * @param topic logical topic name; namespaced via {@link #wrapTopic(String)}
	 * @param o     payload to send
	 * @return the message id assigned by the {@link QtMessager} envelope
	 */
	@Override
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public <T extends Serializable> String sendAsyn(String topic, T o) {

		topic = wrapTopic(topic);
		// Raw QtMessager/ProducerRecord kept from the original code:
		// QtMessager's generic declaration is not visible here, so the raw
		// construction is preserved and the unchecked warning suppressed.
		IQtMessager<T> messager = new QtMessager(o);
		this.send(new ProducerRecord(topic, messager));
		return messager.getMsgId();
	}

	/**
	 * Sends the payload asynchronously, invoking {@code callback} with the
	 * message id, any exception, and the record metadata once the send
	 * completes.
	 *
	 * @param topic    logical topic name; namespaced via {@link #wrapTopic(String)}
	 * @param o        payload to send
	 * @param callback completion callback; receives a null exception on success
	 * @return the message id assigned by the {@link QtMessager} envelope
	 */
	@Override
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public <T extends Serializable> String sendAsyn(
		String topic, T o, final IQtCallback callback) {

		topic = wrapTopic(topic);
		final IQtMessager<T> messager = new QtMessager(o);
		this.send(new ProducerRecord(topic, messager), new Callback() {

			@Override
			public void onCompletion(
				RecordMetadata metadata, Exception exception) {

				callback.onComplete(messager.getMsgId(), exception, metadata);
			}
		});
		return messager.getMsgId();
	}

	/**
	 * Sends the payload and blocks until the broker acknowledges it.
	 *
	 * @param topic logical topic name; namespaced via {@link #wrapTopic(String)}
	 * @param o     payload to send
	 * @return the message id assigned by the {@link QtMessager} envelope
	 * @throws RuntimeException if the send is interrupted or fails
	 */
	@Override
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public <T extends Serializable> String sendSync(String topic, T o) {

		topic = wrapTopic(topic);
		IQtMessager<T> messager = new QtMessager(o);
		Future<RecordMetadata> send =
			this.send(new ProducerRecord(topic, messager));
		try {
			// Block until the broker acknowledges the record.
			send.get();
		}
		catch (InterruptedException e) {
			// Restore the interrupt flag so callers can observe the
			// interruption (the original code silently discarded it).
			Thread.currentThread().interrupt();
			throw new RuntimeException(
				"kafka send sendSync InterruptedException:" + topic, e);
		}
		catch (ExecutionException e) {
			throw new RuntimeException(
				"kafka send sendSync ExecutionException:" + topic, e);
		}
		return messager.getMsgId();
	}

	/**
	 * Namespaces a logical topic as {@code productCode_appCode_topic}.
	 *
	 * @param topic logical topic name; must be non-blank
	 * @return the fully qualified topic name
	 * @throws RuntimeException if {@code topic} is blank
	 */
	private String wrapTopic(String topic) {

		if (StringUtils.isBlank(topic)) {
			throw new RuntimeException("empty topic");
		}
		return StringUtils.join(new Object[] {
			MainConfigLoader.getInstance().getProductCode(),
			MainConfigLoader.getInstance().getAppCode(),
			topic
		}, '_');
	}
}
