package com.antball.v1;

import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

/**
 * Demo: schedules a consumer task on a thread pool with a fixed delay.
 *
 * @author huangsj
 * @since 2018/8/23 11:45
 */
public class ConsumerDemo1 {


    public static void main(String[] args) throws IOException {

        ScheduledExecutorService service = Executors.newScheduledThreadPool(4);
        System.out.println("外部开始时间：" + System.currentTimeMillis());

        Runnable r = new ConsumerThread();

        for (int i =0; i < 1; i++){
            ScheduledFuture<?> schedule = service.schedule(
                    r,
                    5L,
                    TimeUnit.SECONDS);
        }


//        Properties properties = new Properties();
//        InputStream in = ConsumerDemo.class.getClassLoader().getResourceAsStream("consumer.properties");
//        properties.load(in);
//
//        Consumer<String, String> consumer = new KafkaConsumer<String, String>(properties);
//
//        Collection<String> topics = Arrays.asList("hadoop");
//        // 消费者订阅topic
//        consumer.subscribe(topics);
//        ConsumerRecords<String, String> consumerRecords = null;
//        while (true) {
//            // 接下来就要从topic中拉取数据
//            consumerRecords = consumer.poll(1000);
//            // 遍历每一条记录
//            for (ConsumerRecord consumerRecord : consumerRecords) {
//                long offset = consumerRecord.offset();
//                int partition = consumerRecord.partition();
//                Object key = consumerRecord.key();
//                Object value = consumerRecord.value();
//                System.out.format("%d\t%d\t%s\t%s\n", offset, partition, key, value);
//            }
//
//        }
    }
}
