package com.devops.admin.service.impl;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.devops.admin.mapper.DOSProjectMapper;
import com.devops.admin.mapper.KafkaTopicMapper;
import com.devops.admin.model.*;
import com.devops.admin.po.DOSBindStatusProject;
import com.devops.admin.po.DOSProjectPo;
import com.devops.admin.po.KafkaTopicPo;
import com.devops.admin.service.DOSResourceService;
import com.devops.admin.service.KafkaTopicProjectRelationService;
import com.devops.admin.service.KafkaTopicService;
import com.devops.admin.temporal.workflow.SyncKafkaTopicWorkflow;
import com.devops.admin.util.Assert;
import com.devops.admin.vo.DOSPageVo;
import com.devops.admin.vo.KafkaTopicVo;
import com.devops.constant.DevOpsConstant;
import com.google.common.collect.Lists;
import io.temporal.client.WorkflowClient;
import io.temporal.client.WorkflowOptions;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * Database service implementation for table {@code dos_kafka_topic}.
 * <p>
 * Handles topic CRUD on the database side, mirrors topic creation onto the
 * Kafka broker, maintains topic-project relations, and triggers the Temporal
 * synchronization workflow.
 *
 * @author Dell
 * @createDate 2023-05-17 15:09:47
 */
@Slf4j
@Service
public class KafkaTopicServiceImpl extends ServiceImpl<KafkaTopicMapper, KafkaTopic>
    implements KafkaTopicService {

    /** Admin-client request timeout (ms), also used when awaiting topic creation. */
    private static final int KAFKA_REQUEST_TIMEOUT_MS = 30000;

    @Resource
    WorkflowClient workflowClient;
    @Resource
    DOSResourceService resourceService;
    @Resource
    KafkaTopicProjectRelationService kafkaTopicProjectRelationService;
    @Resource
    DOSProjectMapper dosProjectMapper;

    /**
     * Starts the Temporal workflow that synchronizes Kafka topics.
     * <p>
     * Fix: the workflow id is now a random UUID — the previous
     * {@code System.currentTimeMillis()} id could collide (and be rejected by
     * Temporal) when two syncs were triggered within the same millisecond.
     */
    @Override
    public void syncKafkaTopic() {
        WorkflowOptions options = WorkflowOptions.newBuilder()
                .setTaskQueue(DevOpsConstant.SYNC_KAFKA_TOPIC_WORKFLOW_TASK_QUEUE)
                .setWorkflowId(UUID.randomUUID().toString())
                .setWorkflowTaskTimeout(Duration.ofMinutes(1))
                .setWorkflowExecutionTimeout(Duration.ofHours(1))
                .setWorkflowRunTimeout(Duration.ofHours(1))
                .build();
        SyncKafkaTopicWorkflow workflow = workflowClient.newWorkflowStub(SyncKafkaTopicWorkflow.class, options);
        workflow.syncKafkaTopic();
    }

    /**
     * Returns one page of topics matching the query conditions, each topic
     * enriched with its bound projects.
     *
     * @param kafkaTopicPo query conditions
     * @param pageVo       page number and size
     * @return paged list of topic view objects
     */
    @Override
    public TableResult<KafkaTopicVo> getKafkaTopicList(KafkaTopicPo kafkaTopicPo, DOSPageVo pageVo) {
        Page<KafkaTopicPo> page = new Page<>(pageVo.getPageNum(), pageVo.getPageSize());
        List<KafkaTopicVo> kafkaTopicList = baseMapper.selectKafkaTopicPage(kafkaTopicPo, page);
        queryTopicProjects(kafkaTopicList);
        Pagination p = Pagination.builder().pageNum(page.getCurrent()).pageSize(page.getSize()).total(page.getTotal()).build();
        return new TableResult<>(kafkaTopicList, p);
    }

    /**
     * Populates {@code projects} on every topic VO using two batch queries
     * (relations, then projects) instead of per-topic lookups.
     * <p>
     * Fixes: relations pointing at projects that no longer exist are skipped
     * instead of inserting {@code null} into the project lists, and
     * {@code toMap} has a merge function so duplicate project ids can never
     * throw {@code IllegalStateException}.
     */
    private void queryTopicProjects(List<KafkaTopicVo> kafkaTopicList) {
        List<Integer> kafkaTopicIds = kafkaTopicList.stream().map(KafkaTopicVo::getId).collect(Collectors.toList());
        if (CollUtil.isEmpty(kafkaTopicIds)) {
            return;
        }
        List<KafkaTopicProjectRelation> topicProjectRelationList =
                kafkaTopicProjectRelationService.lambdaQuery()
                        .in(KafkaTopicProjectRelation::getTopicId, kafkaTopicIds)
                        .list();
        List<Integer> projectIds = topicProjectRelationList.stream()
                .map(KafkaTopicProjectRelation::getProjectId)
                .distinct()
                .collect(Collectors.toList());
        if (CollUtil.isEmpty(projectIds)) {
            return;
        }
        List<DOSProject> projectList = dosProjectMapper.selectBatchIds(projectIds);
        Map<Integer, DOSProject> projectMap = projectList.stream()
                .collect(Collectors.toMap(DOSProject::getId, project -> project, (first, second) -> first));
        Map<Integer, List<DOSProject>> kafkaTopicIdProjectMap = topicProjectRelationList.stream()
                // a relation may reference a project that was deleted; drop it
                .filter(relation -> projectMap.containsKey(relation.getProjectId()))
                .collect(Collectors.groupingBy(KafkaTopicProjectRelation::getTopicId,
                        Collectors.mapping(relation -> projectMap.get(relation.getProjectId()), Collectors.toList())));
        kafkaTopicList.forEach(kafkaTopic -> kafkaTopic.setProjects(kafkaTopicIdProjectMap.getOrDefault(kafkaTopic.getId(), new ArrayList<>())));
    }

    /**
     * Creates a topic on the Kafka broker, records it in the database and
     * binds it to the given projects. The broker call happens first so that a
     * broker failure aborts before any DB write; DB inserts run inside the
     * surrounding transaction.
     *
     * @param kafkaTopicPo topic definition (name must be unique per resource)
     * @return whether the project relations were saved
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public Boolean createKafkaTopic(KafkaTopicPo kafkaTopicPo) {
        // reject duplicate topic names on the same Kafka resource
        List<KafkaTopic> sameNameTopics = lambdaQuery().eq(KafkaTopic::getName, kafkaTopicPo.getName())
                .eq(KafkaTopic::getResourceId, kafkaTopicPo.getResourceId())
                .list();
        Assert.isTrue(sameNameTopics.isEmpty(), "topic名称重复");
        // create the topic on the Kafka server first
        createTopicOnServer(kafkaTopicPo);
        // then persist the topic row
        KafkaTopic kafkaTopic = new KafkaTopic();
        BeanUtil.copyProperties(kafkaTopicPo, kafkaTopic);
        baseMapper.insert(kafkaTopic);
        // finally bind the topic to its projects
        return relateProjects(kafkaTopicPo.getProjectIds(), kafkaTopic.getId());
    }

    /**
     * @param projectId project primary key
     * @return all topics bound to the given project
     */
    @Override
    public List<KafkaTopicVo> getKafkaTopicsByProject(Integer projectId) {
        return baseMapper.selectKafkaTopicByProjectId(projectId);
    }

    /**
     * Loads one topic together with its bound projects and resource name.
     * <p>
     * Fix: asserts the topic exists and null-guards the resource lookup
     * instead of throwing a bare {@code NullPointerException} for an unknown
     * or orphaned id.
     *
     * @param topicId topic primary key
     * @return topic view object
     */
    @Override
    public KafkaTopicVo getKafkaTopicById(Integer topicId) {
        KafkaTopic kafkaTopic = baseMapper.selectById(topicId);
        Assert.isTrue(kafkaTopic != null, "kafka topic not found: " + topicId);
        KafkaTopicVo kafkaTopicVo = new KafkaTopicVo();
        BeanUtil.copyProperties(kafkaTopic, kafkaTopicVo);
        List<DOSProject> dosProjects = dosProjectMapper.selectProjectsByTopicId(topicId);
        List<Integer> projectIds = dosProjects.stream().map(DOSProject::getId).collect(Collectors.toList());
        kafkaTopicVo.setProjectIds(projectIds);
        kafkaTopicVo.setProjects(dosProjects);
        DOSResource resource = resourceService.getById(kafkaTopic.getResourceId());
        if (resource != null) {
            kafkaTopicVo.setResourceName(resource.getName());
        }
        return kafkaTopicVo;
    }

    /**
     * Replaces the full set of project bindings for a topic
     * (delete-then-insert inside one transaction).
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public Boolean relateKafkaTopicToProjects(Integer topicId, List<Integer> projectIds) {
        kafkaTopicProjectRelationService.remove(new LambdaQueryWrapper<KafkaTopicProjectRelation>()
                .eq(KafkaTopicProjectRelation::getTopicId, topicId));
        return relateProjects(projectIds, topicId);
    }

    /**
     * Splits all projects into "not bound" (bindStatus=0) and "bound to this
     * topic" (bindStatus=1) groups.
     *
     * @param topicId topic primary key
     * @return two-element list: [unbound group, bound group]
     */
    @Override
    public List<DOSBindStatusProject> getProjectsBindStatusByKafkaTopicId(Integer topicId) {
        List<DOSProject> projectList = dosProjectMapper.selectList(new LambdaQueryWrapper<>());
        DOSBindStatusProject noBindProjects = DOSBindStatusProject.builder().bindStatus(0).projects(new ArrayList<>()).build();
        DOSBindStatusProject bindProjects = DOSBindStatusProject.builder().bindStatus(1).projects(new ArrayList<>()).build();
        // Set lookup: O(1) contains instead of an O(n) list scan per project
        Set<Integer> topicProjectIds =
                dosProjectMapper.selectProjectsByTopicId(topicId).stream().map(DOSProject::getId).collect(Collectors.toSet());
        projectList.forEach(project -> {
            DOSProjectPo projectPo = new DOSProjectPo();
            BeanUtil.copyProperties(project, projectPo);
            if (topicProjectIds.contains(project.getId())) {
                bindProjects.getProjects().add(projectPo);
            } else {
                noBindProjects.getProjects().add(projectPo);
            }
        });
        List<DOSBindStatusProject> result = new ArrayList<>();
        result.add(noBindProjects);
        result.add(bindProjects);
        return result;
    }

    /**
     * Updates only the description column of a topic.
     *
     * @return whether any row was updated
     */
    @Override
    public Boolean updateKafkaTopicDescription(Integer topicId, String description) {
        return lambdaUpdate()
                .set(KafkaTopic::getDescription, description)
                .eq(KafkaTopic::getId, topicId)
                .update();
    }

    /**
     * Creates the topic on the Kafka broker itself.
     * <p>
     * Fix: {@code AdminClient.createTopics} is asynchronous and the original
     * code never awaited its {@code CreateTopicsResult}, so broker-side
     * failures (topic already exists, authorization error, replication factor
     * larger than broker count, ...) were silently ignored and a DB row was
     * written for a topic that was never created. We now block on the result
     * within the request timeout and propagate failures.
     */
    private void createTopicOnServer(KafkaTopicPo kafkaTopicPo) {
        DOSResource kafkaResource = resourceService.getBaseMapper().selectById(kafkaTopicPo.getResourceId());
        Assert.isTrue(kafkaResource != null, "kafka resource not found: " + kafkaTopicPo.getResourceId());
        // connect to the broker and create the topic
        HashMap<String, Object> configs = new HashMap<>();
        configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaResource.getIp());
        configs.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, KAFKA_REQUEST_TIMEOUT_MS);
        try (AdminClient adminClient = KafkaAdminClient.create(configs)) {
            log.info("connect kafka server success {}", kafkaResource.getIp());
            NewTopic newTopic = new NewTopic(kafkaTopicPo.getName(), kafkaTopicPo.getPartitions(),
                    kafkaTopicPo.getReplicas().shortValue());
            // await the async result so broker-side errors actually surface
            adminClient.createTopics(Lists.newArrayList(newTopic))
                    .all()
                    .get(KAFKA_REQUEST_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            log.error("create kafka topic error", e);
            throw new RuntimeException("create kafka topic error", e);
        } catch (Exception e) {
            log.error("create kafka topic error", e);
            throw new RuntimeException("create kafka topic error", e);
        }
    }

    /**
     * Inserts topic-project relation rows; a null or empty project list is a
     * successful no-op.
     */
    private boolean relateProjects(List<Integer> projectIds, Integer topicId) {
        if (CollUtil.isEmpty(projectIds)) {
            return true;
        }
        List<KafkaTopicProjectRelation> topicProjectRelations = new ArrayList<>();
        projectIds.forEach(projectId -> {
            KafkaTopicProjectRelation topicProjectRelation = new KafkaTopicProjectRelation();
            topicProjectRelation.setTopicId(topicId);
            topicProjectRelation.setProjectId(projectId);
            topicProjectRelations.add(topicProjectRelation);
        });
        return kafkaTopicProjectRelationService.saveBatch(topicProjectRelations);
    }
}




