package com.app.util.kafka;

import java.util.Properties;
import java.util.concurrent.ExecutorService;

import org.apache.kafka.clients.consumer.KafkaConsumer;

import com.app.conf.AppConfig;

/**
 * Utility class for consuming Kafka message-queue topics.
 * **/
public class KafkaConsumption 
{
    /** Consumer configuration shared by all spawned worker consumers. */
    private Properties props;

    /** Pool running the consumer workers; created in consume(), stopped in close(). */
    private ExecutorService executorService;

    /**
     * Builds the shared Kafka consumer configuration.
     *
     * @param host    value for {@code bootstrap.servers} (broker host:port list)
     * @param groupId consumer group id for all workers started by this instance
     */
    public KafkaConsumption(String host, String groupId)
    {
        AppConfig.getAppConfig().getLogger().info(host);
        AppConfig.getAppConfig().getLogger().info(groupId);

        props = new Properties();
        props.setProperty("bootstrap.servers", host);
        props.setProperty("group.id", groupId);
        props.setProperty("enable.auto.commit", "true");
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    }

    /**
     * Starts the consumer workers for the given topics and runs them on an
     * internal thread pool. Each topic contributes {@code getNum()} workers,
     * and every worker receives its own {@link KafkaConsumer} instance
     * (KafkaConsumer is not safe for multi-threaded access).
     *
     * @param kafkaParams topic descriptors to listen on
     */
    public void consume(KafkaParam... kafkaParams)
    {
        // Total number of worker threads required across all topics.
        int threadNum = 0;
        for (KafkaParam topic : kafkaParams)
        {
            threadNum += topic.getNum();
        }

        AppConfig.getAppConfig().getLogger().info("kafka consumer thread count: " + threadNum);

        if (threadNum <= 0)
        {
            // Nothing to consume; avoid creating an empty/illegal-size pool.
            return;
        }

        // Size the pool to the actual worker count. The previous hard-coded
        // size of 10 starved any workers beyond the first 10, because each
        // KafkaConsumWork occupies its thread indefinitely.
        executorService = AppConfig.getAppConfig().getFixedThreadPool(threadNum);

        // Launch one worker per requested thread for each topic.
        for (KafkaParam topic : kafkaParams)
        {
            for (int i = 0; i < topic.getNum(); i++)
            {
                executorService.execute(new KafkaConsumWork(new KafkaConsumer<>(props), topic.getName(), i, topic));
                AppConfig.getAppConfig().getLogger().info(topic.getName() + "-" + i);
            }
        }
    }

    /**
     * Stops the consumer workers by interrupting the pool's threads.
     * NOTE(review): workers are expected to observe interruption (e.g. via
     * KafkaConsumer.wakeup or an interrupt check in their poll loop) — confirm
     * KafkaConsumWork handles this; otherwise threads keep running.
     */
    public void close()
    {
        if (executorService != null)
        {
            executorService.shutdownNow();
            executorService = null;
        }
    }
}