package com.gtmc.mould.kafka;

import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.gtmc.mould.jsonstr.JsonArrResolving;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.StringDeserializer;

/**
 * @author: MaLiang
 * @date: 2020/8/6 16:52
 * @version: 1.0
 * Kafka data consumer for the PaaS platform.
 **/
public class PaasKafkaConsumer implements Runnable {

    /** Consumer group id shared by all instances of this consumer. */
    private static final String GROUPID = "groupA";
    /** Maximum time each poll() blocks waiting for records; replaces the busy-spin on poll(50). */
    private static final Duration POLL_TIMEOUT = Duration.ofMillis(500);

    private static final Logger logger = Logger.getLogger(PaasKafkaConsumer.class.getName());

    private final KafkaConsumer<String, String> consumer;
    private final String topic;
    // Parses each consumed JSON payload; project-local helper.
    private final JsonArrResolving jsonR = new JsonArrResolving();
    // Set to false by shutdown() to let run() exit cleanly instead of looping forever.
    private volatile boolean running = true;

    /**
     * Builds a consumer connected to the fixed broker cluster and subscribes it
     * to the given topic. Offsets are auto-committed every second and consumption
     * starts from the earliest available offset for a new group.
     *
     * @param topicName kafka topic to subscribe to
     */
    public PaasKafkaConsumer(String topicName) {
        Properties props = new Properties();
        // Broker list of the target cluster; hosts must be resolvable from this machine.
        props.put("bootstrap.servers", "172.23.8.51:9092,172.23.8.52:9092,172.23.8.53:9092");
        props.put("group.id", GROUPID);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        this.consumer = new KafkaConsumer<>(props);
        this.topic = topicName;
        this.consumer.subscribe(Arrays.asList(topic));
    }

    /**
     * Requests a graceful stop: clears the running flag and wakes up a poll()
     * that may be blocked, so run() exits and closes the consumer. Safe to call
     * from any thread.
     */
    public void shutdown() {
        running = false;
        // wakeup() is the only KafkaConsumer method safe to call from another thread;
        // it makes a blocked poll() throw WakeupException.
        consumer.wakeup();
    }

    /**
     * Polls the subscribed topic until shutdown() is called (or an unrecoverable
     * error occurs), forwarding every record's value to the JSON resolver.
     * The consumer is always closed on exit.
     */
    @Override
    public void run() {
        int messageNo = 1;
        logger.info("==========开始消费kafka数据");
        try {
            while (running) {
                // poll(Duration) blocks up to POLL_TIMEOUT when no data is available,
                // so no manual sleep is needed on an empty result.
                ConsumerRecords<String, String> records = consumer.poll(POLL_TIMEOUT);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(messageNo + "=======receive: key = " + record.key() + ", value = " + record.value() + "消费完成" + " offset===" + record.offset());
                    // record.value() is already a String (StringDeserializer) — no toString() needed.
                    String jsonstr = record.value();
                    logger.info("==========消费到的kafka数据是：" + jsonstr);
                    // 解析json数据 (parse the JSON payload)
                    jsonR.jsonToObject(jsonstr);
                    messageNo++;
                }
            }
        } catch (WakeupException e) {
            // Expected during shutdown(); only unexpected if we are still supposed to run.
            if (running) {
                logger.log(Level.SEVERE, "Unexpected consumer wakeup", e);
            }
        } catch (Exception e) {
            // Boundary of the worker thread: record the failure instead of printing to stderr.
            logger.log(Level.SEVERE, "Kafka consumption failed for topic " + topic, e);
        } finally {
            consumer.close();
        }
    }

}
