package com.leo.jdkcore.kafka;

import java.util.Properties;
import java.util.concurrent.ThreadPoolExecutor;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

public class KafkaConsumerTest {

	/** Number of consumer instances to start; raise to fan out over more partitions of the topic. */
	private static final int CONSUMER_COUNT = 1;

	/**
	 * Standalone driver: builds a consumer configuration, wires a bounded
	 * thread pool into a {@code KafkaReaderContext}, and starts
	 * {@value #CONSUMER_COUNT} {@code KafkaMsgConsumer} instance(s) on topic
	 * {@code "zdr_01"}.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		Properties props = new Properties();
		// NOTE(review): "zookeeper.connect" belongs to the legacy (pre-0.9)
		// consumer; a consumer configured via BOOTSTRAP_SERVERS_CONFIG does not
		// use it. Kept as-is in case KafkaMsgConsumer reads it — confirm and
		// drop if unused.
		props.put("zookeeper.connect", "192.168.0.168:2181/kafka");
		props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.0.168:9092,192.168.0.169:9092");
		props.put(ConsumerConfig.GROUP_ID_CONFIG, "zdr_01_lian");
		// Offsets are committed automatically every 2 seconds.
		props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
		props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "2000");
		props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");

		KafkaReaderContext context = new KafkaReaderContext.Builder().build();

		// Bounded pool: 2 core / 3 max threads, queue capacity 10. With
		// CallerRunsPolicy a saturated pool makes the submitting thread run the
		// task itself, throttling intake instead of rejecting work.
		ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
		executor.setThreadNamePrefix("lian");
		executor.setCorePoolSize(2);
		executor.setMaxPoolSize(3);
		executor.setQueueCapacity(10);
		executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
		executor.initialize();
		context.setExecutor(executor);

		for (int i = 0; i < CONSUMER_COUNT; i++) {
			// NOTE(review): type parameters are <Integer, byte[]> but the class
			// literals are passed value-type first (byte[].class, Integer.class)
			// — verify this ordering against the KafkaMsgConsumer constructor.
			KafkaMsgConsumer<Integer, byte[]> consumer = new KafkaMsgConsumer<Integer, byte[]>("zdr_01", props,
					byte[].class, Integer.class, new KafkaByteMsgHandler(context));
			consumer.start();
		}
	}

}