package com.captjack.bigdata.flume.kafka;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.consumer.Whitelist;
import kafka.javaapi.consumer.ConsumerConnector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Custom Kafka consumer built on the legacy high-level consumer API.
 * Pulls messages from the configured topic with {@code threadNum} worker
 * threads and buffers them as UTF-8 strings in the shared blocking queue
 * {@link #KAFKA_MESSAGE_QUEUE} for downstream processing.
 *
 * @author Jack Sparrow
 * @version 1.0.0
 * @date 2018/7/18 22:55
 * package com.captjack.bigdata.flume
 */
public class CustomizeKafkaConsumer {

    // Fix: was LogFactory.getLog(KafkaConsumer.class), which attributed every
    // log line of this class to org.apache.kafka.clients.consumer.KafkaConsumer.
    private static final Log logger = LogFactory.getLog(CustomizeKafkaConsumer.class);

    /**
     * Bounded buffer between the consumer worker threads (producers) and
     * downstream readers. {@code put()} blocks when full, giving natural
     * back-pressure against the Kafka brokers.
     */
    static final BlockingQueue<String> KAFKA_MESSAGE_QUEUE = new ArrayBlockingQueue<String>(30000);

    /**
     * High-level consumer connector; created in the constructor and
     * released in {@link #shutdown()}.
     */
    private final ConsumerConnector consumerConnector;

    /** Topic name, used as a whitelist pattern when creating streams. */
    private final String topic;

    /** Number of Kafka streams / worker threads; non-positive input falls back to 1. */
    private int threadNum = 1;

    /** Worker pool, one thread per stream; created lazily in {@link #start()}. */
    private ExecutorService fixedThreadPool;

    /**
     * Creates the consumer connector (connects to ZooKeeper immediately).
     *
     * @param zookeeperConn ZooKeeper connect string, e.g. {@code host1:2181,host2:2181}
     * @param groupId       Kafka consumer group id
     * @param topic         topic whitelist pattern to consume
     * @param threadNum     desired worker/stream count; values {@code <= 0} default to 1
     */
    CustomizeKafkaConsumer(String zookeeperConn, String groupId, String topic, int threadNum) {
        this.topic = topic;
        if (threadNum > 0) {
            this.threadNum = threadNum;
        }
        Properties props = new Properties();
        props.put("zookeeper.connect", zookeeperConn);
        props.put("group.id", groupId);
        props.put("zookeeper.session.timeout.ms", "10000");
        props.put("zookeeper.sync.time.ms", "2000");
        props.put("auto.commit.interval.ms", "1000");
        props.put("partition.assignment.strategy", "roundrobin");
        // Fix: the original messages ran key and value together ("zookeeper.connect<value>").
        logger.info("zookeeper.connect = " + zookeeperConn);
        logger.info("group.id = " + groupId);
        this.consumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
    }

    /**
     * Commits the current offsets, disconnects from Kafka/ZooKeeper and
     * stops the worker pool. Safe to call even if {@link #start()} was
     * never invoked.
     */
    public void shutdown() {
        if (this.consumerConnector != null) {
            // Persist consumed offsets before tearing the connector down so
            // messages already handed to the queue are not re-delivered.
            this.consumerConnector.commitOffsets();
            this.consumerConnector.shutdown();
        }
        if (this.fixedThreadPool != null) {
            this.fixedThreadPool.shutdown();
        }
    }

    /**
     * Creates {@code threadNum} message streams and one worker thread per
     * stream. Each worker decodes messages as UTF-8 and blocks on
     * {@link #KAFKA_MESSAGE_QUEUE} until downstream consumers drain it.
     */
    public void start() {
        List<KafkaStream<byte[], byte[]>> streams =
                this.consumerConnector.createMessageStreamsByFilter(new Whitelist(this.topic), this.threadNum);

        this.fixedThreadPool = new ThreadPoolExecutor(
                this.threadNum,
                this.threadNum,
                0L,
                TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<Runnable>(),
                new ThreadFactory() {
                    private final AtomicInteger atomicInteger = new AtomicInteger();

                    @Override
                    public Thread newThread(Runnable runnable) {
                        Thread thread = new Thread(runnable);
                        // Name the workers so they are recognizable in thread dumps.
                        thread.setName("Customize-kafka-consumer-thread" + this.atomicInteger.getAndIncrement());
                        return thread;
                    }
                });

        for (final KafkaStream<byte[], byte[]> stream : streams) {
            this.fixedThreadPool.submit(new Runnable() {
                @Override
                public void run() {
                    // Fix: typed iterator removes the raw type and the unchecked
                    // (byte[]) cast the original needed on message().
                    ConsumerIterator<byte[], byte[]> consumerIterator = stream.iterator();
                    String message = null;
                    while (consumerIterator.hasNext()) {
                        try {
                            // Fix: decode with an explicit charset instead of the
                            // platform default, which varies between hosts.
                            message = new String(consumerIterator.next().message(), StandardCharsets.UTF_8);
                            KAFKA_MESSAGE_QUEUE.put(message);
                        } catch (InterruptedException e) {
                            // Fix: the original caught Exception and swallowed the
                            // interrupt, so workers could never be stopped cleanly.
                            // Restore the flag and exit this worker.
                            Thread.currentThread().interrupt();
                            logger.error("Kafka Consumer interrupted, msg = " + message, e);
                            return;
                        } catch (Exception e) {
                            logger.error("Kafka Consumer fail msg = " + message, e);
                        }
                    }
                }
            });
        }
    }

}
