package com.zhxKafka2boot;

import com.alibaba.fastjson.JSONObject;

import org.apache.commons.collections4.map.HashedMap;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.io.UnsupportedEncodingException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Kafka consumer component for the elasticSearch_springBoot_study project:
 * listens on the {@code UMAP} topic and records the latest consumed offset
 * per topic-partition.
 *
 * @author Mr.Zhongsf
 * @since 2021-03-12
 */
@Component
public class KafkaConsumer {

    /**
     * Latest consumed offset per topic-partition.
     *
     * <p>{@code ConcurrentHashMap} because Spring Kafka may invoke listener
     * methods from multiple consumer threads when container concurrency &gt; 1;
     * the previous Commons Collections {@code HashedMap} is not thread-safe.
     *
     * <p>NOTE(review): nothing visible in this file ever reads this map —
     * presumably it feeds a manual offset-commit strategy elsewhere; confirm
     * before removing. Kafka's commit convention stores {@code offset + 1}
     * (the next offset to read); storing the raw offset would re-deliver the
     * last record after a restart — confirm which semantics the committer
     * expects before changing.
     */
    private static final Map<TopicPartition, OffsetAndMetadata> currentOffsets =
            new ConcurrentHashMap<>();

    /**
     * Consumes records from the {@code UMAP} topic: prints which
     * topic/partition the record came from plus its value, and remembers the
     * record's offset in {@link #currentOffsets}.
     *
     * @param record the consumed Kafka record (key/value types are unknown here)
     */
    @KafkaListener(topics = {"UMAP"})
    public void getKafkaMessage2(ConsumerRecord<?, ?> record) {
        // Log which topic/partition this record came from and its payload.
        System.out.println("简单消费：" + record.topic() + "-" + record.partition() + "-" + record.value());
        currentOffsets.put(
                new TopicPartition(record.topic(), record.partition()),
                new OffsetAndMetadata(record.offset()));
    }

}
