package ConsumerTest;


import java.io.IOException;
import java.io.PrintStream;
import java.util.Arrays;
import java.util.Date;
import java.util.Properties;

import org.apache.kafka.clients.consumer.*;

import com.sun.prism.impl.Disposer.Record;

import ty.pub.*;
import ty.pub.Error;

/**
 * Minimal Kafka consumer demo: subscribes to topic "TY100" on a hard-coded
 * broker and prints each decoded {@code RawDataPacket} to stdout, numbering
 * the records as they arrive. Runs until the process is killed.
 */
public class ConsumerDemo {
	/**
	 * Entry point. Builds a consumer with auto-commit enabled, subscribes to
	 * the "TY100" topic, and prints every decoded {@code RawDataPacket} to
	 * stdout with a running record counter. Loops forever; terminate the
	 * process to stop.
	 *
	 * @param args unused
	 * @throws IOException declared for callers/tooling; not thrown directly here
	 */
	public static void main(String[] args) throws IOException {
		Properties props = new Properties();
		props.setProperty("bootstrap.servers", "192.168.15.219:9092");
		props.setProperty("group.id", "StormParseTopologyMonitor_Test");
		// Offsets are committed automatically once per second.
		props.setProperty("enable.auto.commit", "true");
		props.setProperty("auto.commit.interval.ms", "1000");
		props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		// Values are decoded by the project-specific deserializer into RawDataPacket.
		props.setProperty("value.deserializer", "ty.pub.RawPacketDecoder");
		// try-with-resources: KafkaConsumer is Closeable; closing it lets the
		// broker rebalance the group promptly if the loop ever exits via an
		// exception (previously the consumer was never closed).
		try (KafkaConsumer<String, RawDataPacket> consumer = new KafkaConsumer<String, RawDataPacket>(props)) {
			consumer.subscribe(Arrays.asList("TY100"));
			PrintStream p = System.out;
			int i = 0;
			while (true) {
				ConsumerRecords<String, RawDataPacket> records = consumer.poll(100);
				for (ConsumerRecord<String, RawDataPacket> record : records) {
					System.out.println("----------- " + i + " -------------");
					System.out.println(BeanUtil.ObjectToString(record.value()));
					// BUG FIX: counter was printed but never incremented, so
					// every record used to be labelled 0.
					i++;
				}
				p.flush();
			}
		}
	}
}
