package com.changdu.seatunnel.admin.service;

import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.changdu.seatunnel.admin.config.common.BusinessException;
import com.changdu.seatunnel.admin.config.enums.SyncErrorStatus;
import com.changdu.seatunnel.admin.entity.DataInstance;
import com.changdu.seatunnel.admin.entity.DataKafkaTemplate;
import com.changdu.seatunnel.admin.entity.DataTemplate;
import com.changdu.seatunnel.admin.entity.SyncConfig;
import com.changdu.seatunnel.admin.entity.SyncTask;
import com.changdu.seatunnel.admin.mapper.SyncConfigMapper;
import com.changdu.seatunnel.admin.pojo.SyncConfigGenerate;
import com.changdu.seatunnel.admin.util.SyncConfigUtils;

@Service
public class SyncConfigService extends ServiceImpl<SyncConfigMapper, SyncConfig> {

    @Autowired
    private DataSourceService dataInstanceService;

    @Autowired
    private DataTemplateService dataTemplateService;

    @Autowired
    private SyncTaskService syncTaskService;

    @Autowired
    private DataKafkaTemplateService dataKafkaTemplateService;

    /**
     * Generates the task configs for a sync config and persists them.
     * A cross-datacenter job is usually split into two legs, e.g.:
     * <pre>
     *   1. mysql -> kafka
     *   2. kafka -> starrocks
     *   3. tidb  -> kafka
     *   4. tidb  -> starrocks
     * </pre>
     *
     * @param configId id of the owning {@link SyncConfig}
     * @param config   generation parameters (source/target instance and template)
     * @throws BusinessException when a referenced instance or template cannot be found
     */
    public void generateConfigWithId(Long configId, SyncConfigGenerate config) {
        List<SyncTask> tasks = generate(config);

        // Persist each generated task; kafka templates are stored in their own table.
        for (SyncTask syncTask : tasks) {
            syncTask.setSyncConfigId(configId);
            syncTask.setUpdateTime(LocalDateTime.now().toString());
            if (syncTask instanceof DataKafkaTemplate dataKafkaTemplate) {
                dataKafkaTemplateService.saveOrUpdate(dataKafkaTemplate);
            } else {
                syncTaskService.saveOrUpdate(syncTask);
            }
        }
    }

    /**
     * Builds the list of sync tasks: the source leg, plus the target leg when
     * the two sides use different templates.
     *
     * @param config generation parameters
     * @return generated tasks; may contain {@link DataKafkaTemplate} entries
     * @throws BusinessException when a referenced instance or template cannot be found
     */
    private List<SyncTask> generate(SyncConfigGenerate config) {
        // Resolve the source data instance (e.g. the mysql -> kafka leg).
        DataInstance sourceInstance = dataInstanceService.selectByDcCodeAndInstance(
                config.getSourceConfig().getDataCenter(), config.getSourceConfig().getInstance());
        if (Objects.isNull(sourceInstance)) {
            throw new BusinessException(SyncErrorStatus.TEMPLATE_NOT_FOUND);
        }

        // Resolve the target data instance (e.g. the kafka -> starrocks leg).
        DataInstance targetInstance = dataInstanceService.selectByDcCodeAndInstance(
                config.getTargetConfig().getDataCenter(), config.getTargetConfig().getInstance());
        if (Objects.isNull(targetInstance)) {
            throw new BusinessException(SyncErrorStatus.TEMPLATE_NOT_FOUND);
        }

        // Source-side template; fail fast with a business error instead of an
        // NPE on getTemplate() when the id does not resolve.
        DataTemplate sourceTemplate = dataTemplateService.getById(config.getSourceConfig().getTemplateId());
        if (Objects.isNull(sourceTemplate)) {
            throw new BusinessException(SyncErrorStatus.TEMPLATE_NOT_FOUND);
        }
        List<SyncTask> tasks = SyncConfigUtils.createSourceTaskWithTemplate(
                sourceInstance, targetInstance, sourceTemplate.getTemplate(), config);

        // Target-side template, only when it differs from the source one.
        // Objects.equals, not != : the ids are boxed, and reference comparison
        // silently misbehaves for values outside the Integer/Long cache.
        if (!Objects.equals(config.getSourceConfig().getTemplateId(), config.getTargetConfig().getTemplateId())) {
            DataTemplate targetTemplate = dataTemplateService.getById(config.getTargetConfig().getTemplateId());
            if (Objects.isNull(targetTemplate)) {
                throw new BusinessException(SyncErrorStatus.TEMPLATE_NOT_FOUND);
            }
            tasks.addAll(SyncConfigUtils.createTargetTaskWithTemplate(
                    sourceInstance, targetInstance, targetTemplate.getTemplate(), config));
        }
        return tasks;
    }

    /**
     * Searches sync configs by data center / instance / database / table name,
     * matching either the source side or the target side of each config.
     * With no filter at all, every config is returned.
     *
     * @param dataCenter data center code filter (blank = ignored)
     * @param instance   instance name filter (blank = ignored)
     * @param database   database name filter, matched inside the JSON array column (blank = ignored)
     * @param tableName  table name substring filter, matched inside the JSON table list (blank = ignored)
     * @return configs matching on the source side followed by those matching on the target side
     *         (a config matching both sides may appear twice)
     */
    public List<SyncConfig> selectSyncConfigs(String dataCenter, String instance, String database, String tableName) {
        if (StringUtils.isBlank(dataCenter)
                && StringUtils.isBlank(instance)
                && StringUtils.isBlank(database)
                && StringUtils.isBlank(tableName)) {
            return this.list();
        }

        LambdaQueryWrapper<SyncConfig> sourceWrapper = new LambdaQueryWrapper<>();
        sourceWrapper.eq(StringUtils.isNotBlank(dataCenter), SyncConfig::getSourceDataCenter, dataCenter);
        sourceWrapper.eq(StringUtils.isNotBlank(instance), SyncConfig::getSourceInstance, instance);
        sourceWrapper.apply(StringUtils.isNotBlank(database), "JSON_CONTAINS(source_databases, {0})", String.format("\"%s\"", database));
        // Bind tableName through the {0} placeholder instead of String.format-ing
        // it into the SQL text, which was an injection vector. JSON_SEARCH uses
        // LIKE-style wildcards, so wrap the value in '%'.
        sourceWrapper.apply(StringUtils.isNotBlank(tableName),
                "JSON_SEARCH(source_tables, 'one', {0}, NULL, '$[*].name') IS NOT NULL",
                "%" + tableName + "%");
        List<SyncConfig> sourceConfigs = this.list(sourceWrapper);

        LambdaQueryWrapper<SyncConfig> targetWrapper = new LambdaQueryWrapper<>();
        targetWrapper.eq(StringUtils.isNotBlank(dataCenter), SyncConfig::getTargetDataCenter, dataCenter);
        targetWrapper.eq(StringUtils.isNotBlank(instance), SyncConfig::getTargetInstance, instance);
        targetWrapper.apply(StringUtils.isNotBlank(database), "JSON_CONTAINS(target_databases, {0})", String.format("\"%s\"", database));
        targetWrapper.apply(StringUtils.isNotBlank(tableName),
                "JSON_SEARCH(target_tables, 'one', {0}, NULL, '$[*].name') IS NOT NULL",
                "%" + tableName + "%");
        List<SyncConfig> targetConfigs = this.list(targetWrapper);

        sourceConfigs.addAll(targetConfigs);
        return sourceConfigs;
    }

    /**
     * Regenerates the template configs for an existing sync config and
     * reconciles the stored tasks: a generated task whose name matches an
     * existing row is updated in place (preserving the row's identity),
     * otherwise it is inserted. Kafka templates are wiped and re-created.
     *
     * @param configId id of the owning {@link SyncConfig}
     * @param config   generation parameters
     * @throws BusinessException when a referenced instance or template cannot be found
     */
    public void updateGenerateConfigWithId(Long configId, SyncConfigGenerate config) {
        // Index the currently stored tasks by name for the merge below.
        // The merge function keeps the last entry so an unexpected duplicate
        // name does not abort the whole update with IllegalStateException.
        Map<String, SyncTask> existingByName = syncTaskService
                .listBySyncConfigIds(Arrays.asList(configId), Page.of(1, Integer.MAX_VALUE, DEFAULT_BATCH_SIZE))
                .getTasks().stream()
                .collect(Collectors.toMap(SyncTask::getName, v -> v, (a, b) -> b));
        // Kafka templates are regenerated from scratch.
        dataKafkaTemplateService.deleteTemplatesBySyncConfigId(configId);

        List<SyncTask> tasks = generate(config);
        for (SyncTask syncTask : tasks) {
            syncTask.setSyncConfigId(configId);
            syncTask.setUpdateTime(LocalDateTime.now().toString());
            if (syncTask instanceof DataKafkaTemplate dataKafkaTemplate) {
                dataKafkaTemplateService.saveOrUpdate(dataKafkaTemplate);
            } else {
                SyncTask existing = existingByName.get(syncTask.getName());
                if (Objects.nonNull(existing)) {
                    // Keep the stored row's identity, refresh its generated fields.
                    existing.setSinkPlugin(syncTask.getSinkPlugin());
                    existing.setSourcePlugin(syncTask.getSourcePlugin());
                    existing.setTemplateContent(syncTask.getTemplateContent());
                    existing.setDomain(syncTask.getDomain());
                    existing.setUpdateTime(syncTask.getUpdateTime());
                    syncTaskService.updateById(existing);
                } else {
                    syncTaskService.saveOrUpdate(syncTask);
                }
            }
        }
    }

    /**
     * Counts existing configs that already use the given kafka topic (and,
     * optionally, consumer group) — used to detect duplicate names.
     *
     * @param kafkaTopic         topic to look for; blank means no check (returns 0)
     * @param kafkaConsumerGroup optional consumer-group filter (blank = ignored)
     * @return number of matching configs
     */
    public long countKafkaWithConsumer(String kafkaTopic, String kafkaConsumerGroup) {
        // Guard with isBlank (not isEmpty): a whitespace-only topic would pass
        // an isEmpty guard, then fail the isNotBlank eq() condition below and
        // count the entire table, falsely signalling a duplicate.
        if (StringUtils.isBlank(kafkaTopic)) return 0;

        LambdaQueryWrapper<SyncConfig> query = new LambdaQueryWrapper<>();
        query.eq(SyncConfig::getKafkaTopic, kafkaTopic);
        query.eq(StringUtils.isNotBlank(kafkaConsumerGroup), SyncConfig::getKafkaConsumerGroup, kafkaConsumerGroup);
        return count(query);
    }

}