package com.tengju.bff.interfaces.manage.kafka;

import com.tengju.bff.interfaces.shared.ApiResponse;
import com.tengju.bff.interfaces.shared.servlet.ManageEntrance;
import com.tengju.bff.interfaces.shared.servlet.NotTokenValid;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * @author chenhang
 * @Date 2021/3/15
 */

@RestController
@Api(tags = "kafka工具类")
@RequestMapping(value = "manage/kafka")
@Slf4j
@ManageEntrance
public class OffsetUtilController {

    /**
     * Expected format of the {@code dateTime} request parameter, e.g. {@code "2021-03-15 12:00:00"}.
     * DateTimeFormatter is thread-safe, so it is cached instead of rebuilt per request.
     */
    private static final DateTimeFormatter DATE_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Resets the committed offsets of a consumer group on a topic so that consumption
     * restarts from the first record whose timestamp is at or after the given date-time.
     *
     * <p>For each partition of the topic, the broker is asked (via {@code offsetsForTimes})
     * for the earliest offset with a timestamp &gt;= the target time; those offsets are then
     * committed synchronously for the group. Partitions with no record at/after the target
     * time are left untouched.
     *
     * @param consumerGroupId the consumer group whose offsets are reset
     * @param kafkaHost       Kafka bootstrap servers, e.g. {@code "host1:9092,host2:9092"}
     * @param topic           the topic whose partitions are reset
     * @param dateTime        target time in {@code yyyy-MM-dd HH:mm:ss}, interpreted as UTC+8
     * @return {@code true} on success; {@code false} if the topic has no partitions
     *         (e.g. it does not exist)
     */
    @ApiOperation(value = "重置offset", notes = "重置offset")
    @RequestMapping(value = "resetOffset", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
    @NotTokenValid
    public ApiResponse<Boolean> resetOffset(@RequestParam("consumerGroupId") String consumerGroupId,
                                            @RequestParam("kafkaHost") String kafkaHost,
                                            @RequestParam("topic") String topic,
                                            @RequestParam("dateTime") String dateTime) {
        // Parse before touching Kafka so a bad dateTime fails fast without opening connections.
        // The epoch-millis conversion pins the input to UTC+8 (China Standard Time).
        LocalDateTime localDateTime = LocalDateTime.parse(dateTime, DATE_TIME_FORMAT);
        long timestamp = localDateTime.toInstant(ZoneOffset.ofHours(8)).toEpochMilli();

        // try-with-resources: the consumer was previously leaked (never closed),
        // leaving broker connections open after every request.
        try (KafkaConsumer<String, String> consumer = getKafkaConsumer(consumerGroupId, kafkaHost)) {
            // partitionsFor returns null when the topic does not exist — guard against NPE.
            List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
            if (partitionInfos == null || partitionInfos.isEmpty()) {
                log.warn("resetOffset: no partitions found for topic {}", topic);
                return ApiResponse.newSuccess(false);
            }

            // Ask the broker, per partition, for the first offset at-or-after the timestamp.
            Map<TopicPartition, Long> timestampsToSearch = new HashMap<>(partitionInfos.size());
            for (PartitionInfo partitionInfo : partitionInfos) {
                timestampsToSearch.put(new TopicPartition(topic, partitionInfo.partition()), timestamp);
            }
            Map<TopicPartition, OffsetAndTimestamp> offsetsByPartition =
                    consumer.offsetsForTimes(timestampsToSearch);

            // A null value means the partition has no record at/after the timestamp; skip it.
            Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(offsetsByPartition.size());
            for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : offsetsByPartition.entrySet()) {
                OffsetAndTimestamp offsetAndTimestamp = entry.getValue();
                if (offsetAndTimestamp != null) {
                    offsets.put(entry.getKey(), new OffsetAndMetadata(offsetAndTimestamp.offset()));
                }
            }
            consumer.commitSync(offsets);
        }
        return ApiResponse.newSuccess(true);
    }

    /**
     * Builds a short-lived String/String consumer used only for metadata lookup and an
     * explicit offset commit — it never polls records.
     *
     * @param consumerGroupId group id the commit is made on behalf of
     * @param kafkaHost       bootstrap servers list
     * @return a new consumer; the caller is responsible for closing it
     */
    private static KafkaConsumer<String, String> getKafkaConsumer(String consumerGroupId, String kafkaHost) {
        Properties props = new Properties();
        props.put("bootstrap.servers", kafkaHost);
        props.put("group.id", consumerGroupId);
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        // Auto-commit must be OFF: this consumer never polls, and the only commit we want
        // is the explicit commitSync in resetOffset. (Was previously true — a latent bug.)
        props.put("enable.auto.commit", false);
        props.put("session.timeout.ms", "30000");
        props.put("auto.offset.reset", "earliest");
        return new KafkaConsumer<>(props);
    }
}
