
 /**
 * 上海中赢金融信息服务有限公司
 * Copyright (c) 2017, chinazyjr All Rights Reserved.
 */
package com.sys.kafka.demos.level3;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * <b>Description：</b> Kafka demo: manual-commit consumer that polls a topic and commits offsets in batches. <br/>
 * <b>ClassName：</b> ConsumerImpl <br/>
 * <b>@author：</b> mobing <br/>
 * <b>@date：</b> 2017年7月11日 上午11:14:54 <br/>
 * <b>@version: </b>  <br/>
 */
public class ConsumerImpl {

    /** Broker list shared by the producer and consumer configurations. */
    private static final String BOOTSTRAP_SERVERS =
            "10.0.3.180:9092,10.0.3.181:9092,10.0.3.182:9092";

    /** Topic consumed when the caller does not supply one. */
    private static final String DEFAULT_TOPIC = "test";

    /**
     * Builds a String/String producer connected to the demo broker cluster.
     * The caller owns the returned producer and is responsible for closing it.
     *
     * @return a configured {@link KafkaProducer}
     */
    private KafkaProducer<String, String> createProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("acks", "1");                 // wait for the partition leader only
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("client.id", "test_jacky_producer");
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<String, String>(props);
    }

    /**
     * Polls the given topic forever, printing each record and committing offsets
     * manually once at least {@code minBatchSize} records have been buffered.
     * This method never returns normally; the consumer is closed only if the
     * poll loop exits with an exception (try-with-resources).
     *
     * @param kproducer unused; kept so existing callers keep compiling
     * @param topic     topic to subscribe to; falls back to {@code "test"} when null
     *                  (matches the original hard-coded behavior)
     * @param message   unused; kept so existing callers keep compiling
     */
    public void consumer(KafkaProducer<String, String> kproducer, String topic, String message) {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("group.id", "manual_g1");
        props.put("enable.auto.commit", "false");      // offsets committed manually below
        props.put("auto.offset.reset", "earliest");    // start from the beginning for a new group
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // try-with-resources: the consumer (and its broker connections) is released
        // if the infinite loop is ever broken by an exception.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props)) {
            consumer.subscribe(Arrays.asList(topic != null ? topic : DEFAULT_TOPIC));
            final int minBatchSize = 5;  // 批量提交数量 — commit after at least this many records
            List<ConsumerRecord<String, String>> buffer = new ArrayList<>();
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("consumer message values is " + record.value()
                            + " and the offset is " + record.offset());
                    buffer.add(record);
                }
                if (buffer.size() >= minBatchSize) {
                    System.out.println("now commit offset");
                    consumer.commitSync();  // synchronous commit of everything polled so far
                    buffer.clear();
                }
            }
        }
    }

    /**
     * Demo entry point: runs the manual-commit consumer against the default topic.
     */
    public static void main(String[] args) {
        ConsumerImpl consumerImpl = new ConsumerImpl();
        consumerImpl.consumer(null, null, null);
        System.out.println("----------------完-----");
    }

}
