package javaConsumer;

import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.api.OffsetRequest;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.common.TopicAndPartition;
import kafka.javaapi.FetchResponse;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.javaapi.message.ByteBufferMessageSet;
import kafka.message.MessageAndOffset;

import scala.collection.JavaConversions.*;
/**
 * Example of the low-level Kafka {@link SimpleConsumer} API: issues a single
 * fetch against partition 0 of {@code KafkaProperties.topic}, prints the
 * fetched messages, and prepares (but does not yet send) an offset request.
 */
public class SimpleJavaConsumer {

    /**
     * Prints every message payload in the given message set as a UTF-8 string.
     *
     * @param messageSet the message set returned by a fetch request
     */
    private static void printMessages(ByteBufferMessageSet messageSet) {
        for (MessageAndOffset messageAndOffset : messageSet) {
            ByteBuffer payload = messageAndOffset.message().payload();
            byte[] bytes = new byte[payload.limit()];
            payload.get(bytes);
            // StandardCharsets.UTF_8 avoids the checked UnsupportedEncodingException
            // thrown by the String(byte[], String) constructor.
            System.out.println(new String(bytes, StandardCharsets.UTF_8));
        }
    }

    public static void main(String[] args) throws Exception {
        // SimpleConsumer(host, port, soTimeout, bufferSize, clientId)
        SimpleConsumer simpleConsumer = new SimpleConsumer("s60",
                9092,
                10000, 100 * 1024,
                KafkaProperties.clientId);
        try {
            System.out.println("Testing single fetch");
            // addFetch(topic: String, partition: Int, offset: Long, fetchSize: Int)
            FetchRequest req = new FetchRequestBuilder()
                    .clientId(KafkaProperties.clientId)
                    .addFetch(KafkaProperties.topic, 0, 0, 1000)
                    .build();
            System.out.println(simpleConsumer.bufferSize());

            FetchResponse fetchResponse = simpleConsumer.fetch(req);
            System.out.println(fetchResponse);

            // Extract the message set once and reuse it (the original fetched it twice).
            ByteBufferMessageSet messageSet = fetchResponse.messageSet(KafkaProperties.topic, 0);
            System.out.println(messageSet);
            printMessages(messageSet);

            // Prepare an offset lookup for the same topic/partition.
            // -2L requests the earliest available offset; maxNumOffsets == 1.
            final TopicAndPartition topicAndPartition = new TopicAndPartition(KafkaProperties.topic, 0);
            PartitionOffsetRequestInfo offsetRequestInfo = new PartitionOffsetRequestInfo(-2L, 1);
            Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<>();
            requestInfo.put(topicAndPartition, offsetRequestInfo);
            // NOTE(review): the OffsetRequest/getOffsetsBefore call was commented out in
            // the original example; requestInfo is prepared but unused until it is wired up.
        } finally {
            // SimpleConsumer holds an open network connection; always release it.
            simpleConsumer.close();
        }
    }
}
