package com.devops.admin.temporal.activity.impl;

import com.devops.admin.model.DOSResource;
import com.devops.admin.model.KafkaTopic;
import com.devops.admin.service.KafkaTopicService;
import com.devops.admin.service.DOSResourceService;
import com.devops.admin.temporal.activity.SyncKafkaTopicActivity;
import com.devops.constant.DOSResourceEnum;
import com.devops.constant.DevOpsConstant;
import com.devops.constant.ResourceSubTypeEnum;
import io.temporal.spring.boot.ActivityImpl;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.*;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;

/**
 * @author jxt
 * @Notes
 * @date 2023/5/17 14:26
 */
@Slf4j
@Component
@ActivityImpl(taskQueues = DevOpsConstant.SYNC_KAFKA_TOPIC_WORKFLOW_TASK_QUEUE)
public class SyncKafkaTopicActivityImpl implements SyncKafkaTopicActivity {

    /** Timeout for Kafka admin-client requests, in milliseconds. */
    private static final int REQUEST_TIMEOUT_MS = 30000;

    @Resource
    private DOSResourceService dosResourceService;
    @Resource
    private KafkaTopicService kafkaTopicService;

    /**
     * Synchronizes Kafka topic metadata into the DB for every Kafka MQ
     * resource registered in {@code dos_resource}. One sync pass per resource.
     */
    @Override
    public void syncKafkaTopic() {
        log.info("sync kafka topic begin");
        List<DOSResource> kafkaResources = dosResourceService.lambdaQuery()
                .eq(DOSResource::getType, DOSResourceEnum.MQ.getType())
                .eq(DOSResource::getSubType, ResourceSubTypeEnum.KAFKA.getType())
                .list();
        kafkaResources.forEach(this::connectAndSync);
        log.info("sync kafka topic end");
    }

    /**
     * Connects to one Kafka cluster and reconciles its topic list against the DB:
     * inserts topics only on the server, flags topics only in the DB as gone,
     * and refreshes metadata for topics present in both.
     *
     * @param kafkaResource the Kafka resource row; its ip field is used as the
     *                      bootstrap servers value (NOTE(review): assumed to be a
     *                      valid host:port list — confirm against resource data)
     * @throws RuntimeException wrapping any admin-client failure
     */
    private void connectAndSync(DOSResource kafkaResource) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaResource.getIp());
        configs.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, REQUEST_TIMEOUT_MS);

        try (AdminClient adminClient = KafkaAdminClient.create(configs)) {
            log.info("connect kafka server success {}", kafkaResource.getIp());
            Set<String> serverTopicNames = adminClient.listTopics().names().get();
            Map<String, KafkaTopic> topicMap = fetchServerTopics(kafkaResource, adminClient, serverTopicNames);
            Map<String, KafkaTopic> dbTopics = queryDbTopics(kafkaResource);
            Set<String> dbTopicNames = dbTopics.keySet();
            // On server but not in DB: insert.
            syncAdd(serverTopicNames, topicMap, dbTopicNames);
            // In DB but not on server: mark as no longer existing.
            syncNotExisted(kafkaResource, serverTopicNames, dbTopicNames);
            // In both: refresh metadata from the server.
            syncUpdate(kafkaResource, serverTopicNames, topicMap, dbTopicNames);
        } catch (InterruptedException e) {
            // Restore the interrupt flag only for genuine interruption.
            Thread.currentThread().interrupt();
            log.error("interrupted while syncing kafka topics for resource {}", kafkaResource.getId(), e);
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            log.error("failed to sync kafka topics for resource {}", kafkaResource.getId(), e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Describes the given topics on the server and maps each topic name to a
     * populated {@link KafkaTopic}. Topics whose describe call fails are logged
     * and omitted from the result (they will be retried on the next sync run).
     *
     * @return topic name -> KafkaTopic built from the server description
     */
    private Map<String, KafkaTopic> fetchServerTopics(DOSResource kafkaResource, AdminClient adminClient, Set<String> serverTopicNames) {
        Map<String, KafkaTopic> topicMap = new HashMap<>(serverTopicNames.size());
        DescribeTopicsResult topicsResult = adminClient.describeTopics(serverTopicNames);
        topicsResult.values().forEach((topicName, future) -> {
            try {
                TopicDescription topicDescription = future.get();
                // Replica count is read from the first partition; 0 guards the
                // (unexpected) case of a topic with no partitions.
                int replicas = topicDescription.partitions().isEmpty()
                        ? 0
                        : topicDescription.partitions().get(0).replicas().size();
                topicMap.put(topicDescription.name(), KafkaTopic.builder()
                        .name(topicDescription.name())
                        .partitions(topicDescription.partitions().size())
                        .replicas(replicas)
                        .info(topicDescription.toString())
                        .resourceId(kafkaResource.getId())
                        .existOnServer(true)
                        .build());
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                log.error("interrupted while describing topic {}", topicName, e);
            } catch (ExecutionException e) {
                // Skip this topic; the rest of the sync still proceeds.
                log.error("describe topic {} failed", topicName, e);
            }
        });
        return topicMap;
    }

    /**
     * Loads all DB topics belonging to this resource, keyed by topic name.
     * (name, resourceId) is assumed unique; the merge function keeps the first
     * row defensively should the DB ever contain duplicates.
     */
    private Map<String, KafkaTopic> queryDbTopics(DOSResource kafkaResource) {
        return kafkaTopicService.lambdaQuery()
                .eq(KafkaTopic::getResourceId, kafkaResource.getId())
                .list()
                .stream()
                .collect(Collectors.toMap(KafkaTopic::getName, topic -> topic, (existing, duplicate) -> existing));
    }

    /**
     * Inserts topics that exist on the server but not yet in the DB.
     * Entries missing from {@code topicMap} (describe failure) are skipped.
     */
    private void syncAdd(Set<String> serverTopicNames, Map<String, KafkaTopic> topicMap, Set<String> dbTopicNames) {
        Set<String> addTopics = new HashSet<>(serverTopicNames);
        addTopics.removeAll(dbTopicNames);
        List<KafkaTopic> newTopics = addTopics.stream()
                .map(topicMap::get)
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
        if (!newTopics.isEmpty()) {
            kafkaTopicService.saveBatch(newTopics);
        }
    }

    /**
     * Flags DB topics that are no longer present on the server. The update is
     * scoped to this resource so identically named topics on other clusters
     * are left untouched.
     */
    private void syncNotExisted(DOSResource kafkaResource, Set<String> serverTopicNames, Set<String> dbTopicNames) {
        Set<String> notExistTopics = new HashSet<>(dbTopicNames);
        notExistTopics.removeAll(serverTopicNames);
        notExistTopics.forEach(topicName -> kafkaTopicService.lambdaUpdate()
                .eq(KafkaTopic::getResourceId, kafkaResource.getId())
                .eq(KafkaTopic::getName, topicName)
                .set(KafkaTopic::getExistOnServer, false)
                .set(KafkaTopic::getUpdateTime, LocalDateTime.now())
                .update());
    }

    /**
     * Refreshes metadata for topics present both on the server and in the DB.
     * Updates are scoped to this resource; topics missing from {@code topicMap}
     * (describe failure) are skipped instead of dereferencing null.
     */
    private void syncUpdate(DOSResource kafkaResource, Set<String> serverTopicNames, Map<String, KafkaTopic> topicMap, Set<String> dbTopicNames) {
        Set<String> updateTopics = new HashSet<>(serverTopicNames);
        updateTopics.retainAll(dbTopicNames);
        updateTopics.forEach(topicName -> {
            KafkaTopic serverTopic = topicMap.get(topicName);
            if (serverTopic == null) {
                // describeTopics failed for this topic; retry on the next run.
                return;
            }
            kafkaTopicService.lambdaUpdate()
                    .eq(KafkaTopic::getResourceId, kafkaResource.getId())
                    .eq(KafkaTopic::getName, topicName)
                    .set(KafkaTopic::getReplicas, serverTopic.getReplicas())
                    .set(KafkaTopic::getPartitions, serverTopic.getPartitions())
                    .set(KafkaTopic::getInfo, serverTopic.getInfo())
                    .set(KafkaTopic::getExistOnServer, true)
                    .set(KafkaTopic::getUpdateTime, LocalDateTime.now())
                    .update();
        });
    }
}
