

import com.google.gson.Gson;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Map;
import java.util.Properties;

import static org.apache.kafka.clients.consumer.ConsumerConfig.*;


/**
 * Minimal Kafka consumer client: spawns a single background thread that
 * subscribes to one topic and logs each record's JSON payload.
 *
 * <p>Expected external configuration (currently hard-coded below):
 * <pre>
 * kafka.url=10.88.201.79:9092,10.88.201.80:9092,10.88.201.81:9092
 * kafka.topic=pos
 * kafka.groupId=sy_ue326
 * </pre>
 */
public class KafkaClient {
    private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class);
    // Gson instances are thread-safe and reusable; final prevents accidental reassignment.
    private static final Gson gson = new Gson();

    // TODO: externalize — these should come from configuration, not constants.
    private final String url = "10.88.201.79:9092,10.88.201.80:9092,10.88.201.81:9092";
    private final String topic = "pos";
    private final String groupId = "sy_ue326";

    // Single consumer thread for the whole process; guarded by the lock taken
    // in the synchronized startConsumMsg().
    private static Thread thread = null;

    public static void main(String[] args) {
        new KafkaClient().startConsumMsg();
    }

    /**
     * Starts the consumer thread if it is not already running.
     *
     * <p>Bug fix: the original called {@code thread.run()} when the thread had
     * already been created, which re-executed the infinite consume loop
     * synchronously on the <em>caller's</em> thread (blocking it forever and
     * duplicating the consumer). A repeat call is now a logged no-op.
     * The method is {@code synchronized} to make the check-then-create on the
     * shared {@code thread} field atomic.
     */
    public synchronized void startConsumMsg() {
        if (thread != null) {
            logger.warn("Kafka consumer thread already running; ignoring start request.");
            return;
        }
        thread = new Thread("kafka") {
            @Override
            public void run() {
                consumeLoop();
            }
        };
        thread.start();
    }

    /**
     * Poll loop: subscribes to {@link #topic} and logs/parses every record
     * until the thread is interrupted. The consumer is closed on every exit
     * path via try-with-resources (the original leaked it).
     */
    private void consumeLoop() {
        Properties props = new Properties();
        props.put(BOOTSTRAP_SERVERS_CONFIG, url);
        props.put(GROUP_ID_CONFIG, groupId);
        props.put(ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put(AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        props.put(SESSION_TIMEOUT_MS_CONFIG, "30000");
        props.put(KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Arrays.asList(topic));
            logger.info("Consumer is running...");
            while (!Thread.currentThread().isInterrupted()) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    // Parameterized logging: no string concatenation when INFO is disabled.
                    logger.info("kafka Data:{}", record.value());
                    try {
                        // NOTE(review): raw Map.class is an unchecked conversion; this assumes
                        // a flat JSON object with string values — confirm with the producer.
                        Map<String, String> params = gson.fromJson(record.value(), Map.class);
                        // TODO: dispatch `params` to an ActionExecutor (previously commented out
                        // Dispatcher.getExecutor(params) / executor.execute() wiring).
                        Thread.sleep(500); // throttle: ~2 records/sec
                    } catch (InterruptedException ie) {
                        // Restore the interrupt flag and exit; the original swallowed this
                        // in the broad catch, making the thread uninterruptible.
                        Thread.currentThread().interrupt();
                        return;
                    } catch (Exception ex) {
                        logger.error("Error:", ex);
                    }
                }
            }
        }
    }

}
