package com.changdu.seatunnel.admin.util;

import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;

import com.changdu.seatunnel.admin.config.enums.SyncKafkaSchemaStatus;
import com.changdu.seatunnel.admin.config.enums.SyncTaskStatus;
import com.changdu.seatunnel.admin.config.enums.SyncTaskType;
import com.changdu.seatunnel.admin.config.enums.SyncType;
import com.changdu.seatunnel.admin.entity.DataCenter;
import com.changdu.seatunnel.admin.entity.DataInstance;
import com.changdu.seatunnel.admin.entity.DataKafkaTemplate;
import com.changdu.seatunnel.admin.entity.DataTemplate;
import com.changdu.seatunnel.admin.entity.SyncConfig;
import com.changdu.seatunnel.admin.entity.SyncTask;
import com.changdu.seatunnel.admin.pojo.CopySyncConfigGenerate.DefaultValue;
import com.changdu.seatunnel.admin.pojo.CopySyncConfigGenerate.ReplaceRegex;
import com.changdu.seatunnel.admin.pojo.FieldMapping;
import com.changdu.seatunnel.admin.pojo.KafkaConfig;
import com.changdu.seatunnel.admin.pojo.ShowTaskTables;
import com.changdu.seatunnel.admin.pojo.SourceConfig;
import com.changdu.seatunnel.admin.pojo.SyncConfigGenerate;
import com.changdu.seatunnel.admin.pojo.TableInfo;
import com.changdu.seatunnel.admin.pojo.TableMapping;
import com.changdu.seatunnel.admin.pojo.TargetConfig;
import com.changdu.seatunnel.admin.service.DataCenterService;
import com.changdu.seatunnel.admin.service.DataTemplateService;
import com.changdu.seatunnel.admin.service.SyncConfigService;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;

import cn.hutool.extra.spring.SpringUtil;
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class SyncConfigUtils {

	/** Topic-name version segment; kafkaToStarrocks skips the first segment of each
	 *  "table_schema_includes" entry — presumably this prefix. TODO confirm against topic builder. */
	public static final String TOPIC_VERSION = "v3";
	/** Template option key in the source node controlling where consumption starts (default "latest"). */
	public static final String START_MODE = "startup.mode";
	/** Template option key in the source node controlling when the job stops (default "never"). */
	public static final String STOP_MODE = "stop.mode";
	
	/**
	 * Converts the generate payload into a persistable {@link SyncConfig} entity.
	 * Collection-valued fields are serialized to JSON strings; status is initialized
	 * to 1 and create/update timestamps are both set to "now".
	 *
	 * @param configData form data describing source, target, table mappings and optional kafka settings
	 * @return a new, not-yet-persisted SyncConfig
	 */
	public static SyncConfig build(SyncConfigGenerate configData) {
		SyncConfig config = new SyncConfig();

		// Source side
		SourceConfig source = configData.getSourceConfig();
		config.setSourceTemplate(source.getTemplateId());
		config.setSourceDataCenter(source.getDataCenter());
		config.setSourceInstance(source.getInstance());
		config.setSourceDatabases(JsonUtils.toJson(source.getDatabases()));
		config.setSourceTables(JsonUtils.toJson(source.getSelectedTables()));

		// Target side
		TargetConfig target = configData.getTargetConfig();
		config.setTargetTemplate(target.getTemplateId());
		config.setTargetDataCenter(target.getDataCenter());
		config.setTargetInstance(target.getInstance());
		config.setTargetDatabases(JsonUtils.toJson(target.getDatabases()));
		config.setTargetTables(JsonUtils.toJson(target.getSelectedTables()));

		// Mappings and bookkeeping fields
		config.setTableMappings(JsonUtils.toJson(configData.getTableMappings()));
		config.setStatus(1);
		String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
		config.setCreateTime(timestamp);
		config.setUpdateTime(timestamp);

		// Kafka settings are optional on the form
		KafkaConfig kafka = configData.getKafkaConfig();
		if (kafka != null) {
			config.setKafkaTopic(kafka.getKafkaTopic());
			config.setKafkaPartition(kafka.getKafkaPartition());
			config.setKafkaConsumerGroup(kafka.getKafkaConsumerGroup());
		}
		return config;
	}

	/**
	 * Converts a persisted {@link SyncConfig} back into the {@link SyncConfigGenerate}
	 * response shape, deserializing the JSON-string columns into typed lists.
	 *
	 * @param config persisted configuration row
	 * @return the hydrated generate view of the configuration
	 */
	public static SyncConfigGenerate toSyncConfigGenerate(SyncConfig config) {
		SyncConfigGenerate configGenerate = new SyncConfigGenerate();
		configGenerate.setId(config.getId());
		
		SourceConfig sourceConfig = new SourceConfig();
		sourceConfig.setTemplateId(config.getSourceTemplate());
		sourceConfig.setDataCenter(config.getSourceDataCenter());
		sourceConfig.setInstance(config.getSourceInstance());
		// Decode with an explicit charset so behavior does not depend on the JVM's platform default.
		sourceConfig.setDatabases(JsonUtils.fromJsonArrayAsList(JsonUtils.getJsonNode(config.getSourceDatabases().getBytes(StandardCharsets.UTF_8)), String.class));
		sourceConfig.setSelectedTables(JsonUtils.fromJsonArrayAsList(JsonUtils.getJsonNode(config.getSourceTables().getBytes(StandardCharsets.UTF_8)), TableInfo.class));
		configGenerate.setSourceConfig(sourceConfig);

		TargetConfig targetConfig = new TargetConfig();
		targetConfig.setTemplateId(config.getTargetTemplate());
		targetConfig.setDataCenter(config.getTargetDataCenter());
		targetConfig.setInstance(config.getTargetInstance());
		targetConfig.setDatabases(JsonUtils.fromJsonArrayAsList(JsonUtils.getJsonNode(config.getTargetDatabases().getBytes(StandardCharsets.UTF_8)), String.class));
		targetConfig.setSelectedTables(JsonUtils.fromJsonArrayAsList(JsonUtils.getJsonNode(config.getTargetTables().getBytes(StandardCharsets.UTF_8)), TableInfo.class));
		configGenerate.setTargetConfig(targetConfig);
		configGenerate.setTableMappings(JsonUtils.fromJsonArrayAsList(JsonUtils.getJsonNode(config.getTableMappings().getBytes(StandardCharsets.UTF_8)), TableMapping.class));
		// isNotBlank: single-argument check (isNoneBlank is intended for varargs).
		if (StringUtils.isNotBlank(config.getKafkaTopic())) {
			configGenerate.setKafkaConfig(KafkaConfig.build(config.getKafkaTopic(), config.getKafkaPartition(), config.getKafkaConsumerGroup()));
		}
		return configGenerate;
	}
	
	/**
	 * Builds the source-side sync tasks for a template, dispatching on the
	 * source→sink channel parsed out of the template itself.
	 *
	 * @param sourceInstance source database instance
	 * @param targetInstance target database instance
	 * @param template raw JSON template text
	 * @param config the sync configuration
	 * @return generated tasks; empty when the channel has no source-side tasks
	 */
	public static List<SyncTask> createSourceTaskWithTemplate(DataInstance sourceInstance, DataInstance targetInstance, String template, SyncConfigGenerate config) {
		List<SyncTask> tasks = new ArrayList<>();
		SyncType channel = parseChannel(template);
		log.info("CreateSourceTaskWithTemplate Channel: {}", channel);
		
		DataCenter dataCenter = SpringUtil.getBean(DataCenterService.class).getByCode(sourceInstance.getDataCenter());
		switch (channel) {
			case MYSQL_KAFKA:
				JsonNode mysqlTemplate = MysqlToKafkaTempltes.createTemplate(sourceInstance, template, config);
				tasks.add(buildTask(channel, mysqlTemplate, dataCenter));
				break;
			case TIDB_STARROCKS:
				// One task per table mapping; fzidc jobs are re-routed (see triageTiDbDataCenter).
				for (TableMapping mapping : config.getTableMappings()) {
					JsonNode tidbTemplate = TiDBToStarrocksTempltes.createTemplate(sourceInstance, targetInstance, mapping, template, config);
					tasks.add(buildTask(channel, tidbTemplate, triageTiDbDataCenter(dataCenter)));
				}
				break;
			default:
				// TIDB_KAFKA / TICDC_KAFKA / MYSQL_STARROCKS produce no source-side tasks here.
				break;
		}
		return tasks;
	}

	/**
	 * Re-routes fzidc TiDB full-load tasks to the staging ("idc-stage") data center
	 * to avoid large-scale resource contention in the future.
	 *
	 * @param dataCenter the originally resolved data center
	 * @return the staging data center when the code is "fzidc", otherwise the input unchanged
	 */
	private static DataCenter triageTiDbDataCenter(DataCenter dataCenter) {
		// Constant-first equals avoids an NPE if getCode() ever returns null.
		if ("fzidc".equals(dataCenter.getCode())) {
			return SpringUtil.getBean(DataCenterService.class).getByCode("idc-stage");
		}
		return dataCenter;
	}

	/**
	 * Assembles a pending {@link SyncTask} from a rendered job template.
	 *
	 * @param channel resolved source→sink channel
	 * @param mysqlTemplate rendered template; must contain env."job.name"
	 * @param dataCenter data center whose domain the task will run in
	 * @return the populated, not-yet-persisted task
	 */
	private static SyncTask buildTask(SyncType channel, JsonNode mysqlTemplate, DataCenter dataCenter) {
		String jobName = mysqlTemplate.get("env").get("job.name").asText();
		SyncTask task = new SyncTask();
		task.setName(jobName);
		task.setShowName(jobName);
		task.setSourcePlugin(channel.getSource());
		task.setSinkPlugin(channel.getSink());
		task.setStatus(SyncTaskStatus.PENDING.getCode());
		task.setTemplateContent(JsonUtils.toJson(mysqlTemplate));
		task.setDomain(dataCenter.getDomain());
		// Task type (full / increment) is derived from the template's startup/stop modes.
		task.setTaskType(parseSourceSyncType(mysqlTemplate).getCode());
		return task;
	}

	/**
	 * Builds the kafka → starrocks tasks: one or more "main" consumer tasks plus
	 * one per-table schema sub-task.
	 *
	 * <p>Main tasks: when no explicit kafka config is present, the job is split per
	 * source database; otherwise a single aggregated task consumes the configured topic.
	 *
	 * @param sourceInstance source instance (its selected tables drive the per-database split)
	 * @param targetInstance target starrocks instance
	 * @param template raw JSON template text
	 * @param config the sync configuration
	 * @return main tasks followed by per-table sub-tasks; empty when the template is not KAFKA_STARROCKS
	 */
	public static List<SyncTask> createTargetTaskWithTemplate(DataInstance sourceInstance, DataInstance targetInstance, String template, SyncConfigGenerate config) {
		SyncType channel = parseChannel(template);
		log.info("createTargetTaskWithTemplate Channel: {}", channel);
		if (SyncType.KAFKA_STARROCKS != channel) return Collections.emptyList();
		
		List<SyncTask> collectors = new ArrayList<>();
		DataCenter dataCenter = SpringUtil.getBean(DataCenterService.class).getByCode(targetInstance.getDataCenter());
		
		if (Objects.isNull(config.getKafkaConfig())) {
			// No dedicated kafka config: split the main task per source database.
			Set<String> databases = config.getSourceConfig()
					.getSelectedTables().stream()
					.map(TableInfo::getDatabase)
					.collect(Collectors.toSet());
			for (String database : databases) {
				JsonNode kafkaTemplate = KafkaToStarrocksTempltes.createMainTemplate(targetInstance, database, template, config);
				collectors.add(buildMainKafkaTask(channel, kafkaTemplate, dataCenter));
			}
		} else {
			// Dedicated kafka config: a single aggregated main task.
			JsonNode kafkaTemplate = KafkaToStarrocksTempltes.createMainTemplate(targetInstance, null, template, config);
			collectors.add(buildMainKafkaTask(channel, kafkaTemplate, dataCenter));
		}
		
		// Sub-tasks: one schema entry per table mapping.
		DataTemplate subTemplate = SpringUtil.getBean(DataTemplateService.class).getByName(KafkaToStarrocksTempltes.SAFKA_SUB_SCHEMA);
		for (TableMapping tableMapping : config.getTableMappings()) {
			collectors.add(buildSubSchema(channel, targetInstance, tableMapping, subTemplate, dataCenter, config));
		}
		return collectors;
	}

	/** Builds one incremental main consumer task from a rendered kafka template. */
	private static SyncTask buildMainKafkaTask(SyncType channel, JsonNode kafkaTemplate, DataCenter dataCenter) {
		SyncTask syncTask = new SyncTask();
		syncTask.setName(kafkaTemplate.get("env").get("job.name").asText());
		syncTask.setSourcePlugin(channel.getSource());
		syncTask.setSinkPlugin(channel.getSink());
		syncTask.setStatus(SyncTaskStatus.PENDING.getCode());
		syncTask.setTemplateContent(JsonUtils.toJson(kafkaTemplate));
		syncTask.setDomain(dataCenter.getDomain());
		syncTask.setShowName(syncTask.getName());
		syncTask.setTaskType(SyncTaskType.INCREMENT.getCode());
		return syncTask;
	}

	/** Builds one per-table kafka schema sub-task; "db.table" names are split on the first dot. */
	private static DataKafkaTemplate buildSubSchema(SyncType channel, DataInstance targetInstance, TableMapping tableMapping,
			DataTemplate subTemplate, DataCenter dataCenter, SyncConfigGenerate config) {
		JsonNode tableKafkaTemplate = KafkaToStarrocksTempltes.createSubTemplate(targetInstance, tableMapping, subTemplate.getTemplate(), config);
		String[] source = tableMapping.getSourceTable().split("\\.", 2);
		String[] target = tableMapping.getTargetTable().split("\\.", 2);
		DataKafkaTemplate syncSchema = new DataKafkaTemplate();
		syncSchema.setName(tableKafkaTemplate.get("tablePath").asText());
		syncSchema.setSourcePlugin(channel.getSource());
		syncSchema.setSinkPlugin(channel.getSink());
		syncSchema.setStatus(SyncKafkaSchemaStatus.PENDING.getCode());
		syncSchema.setTemplateContent(JsonUtils.toJson(tableKafkaTemplate));
		syncSchema.setSourceDataCenter(config.getSourceConfig().getDataCenter());
		syncSchema.setSourceInstance(config.getSourceConfig().getInstance());
		syncSchema.setSourceDatabase(source[0]);
		syncSchema.setSourceTable(source[1]);
		syncSchema.setTargetDataCenter(config.getTargetConfig().getDataCenter());
		syncSchema.setTargetInstance(config.getTargetConfig().getInstance());
		syncSchema.setTargetDatabase(target[0]);
		syncSchema.setTargetTable(target[1]);
		syncSchema.setTopic(tableKafkaTemplate.get("source").get("topic").asText());
		syncSchema.setDomain(dataCenter.getDomain());
		syncSchema.setTaskType(SyncTaskType.INCREMENT.getCode());
		return syncSchema;
	}
	
	/**
	 * Resolves the sync channel (source plugin → sink plugin pair) from a raw JSON template.
	 *
	 * @param template raw JSON template text
	 * @return the resolved channel
	 */
	public static SyncType parseChannel(String template) {
		// Explicit charset: avoid depending on the JVM's platform default encoding.
		JsonNode jsonNode = JsonUtils.getJsonNode(template.getBytes(StandardCharsets.UTF_8));
		return extracted(jsonNode);
	}

	/**
	 * Reads the first source/sink plugin names out of a parsed template and
	 * maps them onto a {@link SyncType}.
	 */
	private static SyncType extracted(JsonNode jsonTemplate) {
		String sourcePlugin = ((ArrayNode) jsonTemplate.get("source")).get(0).get("plugin_name").asText();
		String sinkPlugin = ((ArrayNode) jsonTemplate.get("sink")).get(0).get("plugin_name").asText();
		return SyncType.fromCode(sourcePlugin, sinkPlugin);
	}

	/**
	 * Same as {@link #build(SyncConfigGenerate)} but stamps the given id onto the result,
	 * making it suitable for updating an existing row.
	 *
	 * @param id existing configuration id
	 * @param configData form data to convert
	 * @return the built SyncConfig carrying {@code id}
	 */
	public static SyncConfig buildById(Long id, SyncConfigGenerate configData) {
		SyncConfig config = build(configData);
		config.setId(id);
		return config;
	}
	
	
	/**
	 * Derives the task type from a template source node's startup.mode / stop.mode
	 * options, defaulting to "latest" / "never" when an option is absent.
	 *
	 * @param template parsed job template whose "source" array holds the options
	 * @return the matching task type
	 */
	public static SyncTaskType parseSourceSyncType(JsonNode template) {
		JsonNode source = template.get("source").get(0);
		// camelCase locals per Java naming convention.
		String startMode = source.has(START_MODE) ? source.get(START_MODE).asText() : "latest";
		String stopMode = source.has(STOP_MODE) ? source.get(STOP_MODE).asText() : "never";
		return SyncTaskType.fromSyncMode(startMode, stopMode);
	}

	/**
	 * Rewrites database/table names and field default values in a copied
	 * configuration using the supplied regex rules. Mutates {@code generate}
	 * in place and returns it for chaining. Each rule is optional (null = skip).
	 *
	 * @param generate configuration to rewrite
	 * @param sourceDbRegex rule applied to source database names
	 * @param sourceTbRegex rule applied to source table names
	 * @param targetDbRegex rule applied to target database names
	 * @param targetTbRegex rule applied to target table names
	 * @param defaultValues replacement default values keyed by target field name
	 * @return the mutated {@code generate}
	 */
	public static SyncConfigGenerate replaceProperties(SyncConfigGenerate generate, 
			ReplaceRegex sourceDbRegex, ReplaceRegex sourceTbRegex, 
			ReplaceRegex targetDbRegex, ReplaceRegex targetTbRegex,
			List<DefaultValue> defaultValues) {
		
		SourceConfig sourceConfig = generate.getSourceConfig();
		if (Objects.nonNull(sourceDbRegex)) {
			sourceConfig.getDatabases().replaceAll(db -> replace(sourceDbRegex, db));
			
			for (TableInfo tableInfo : sourceConfig.getSelectedTables()) {
				tableInfo.setDatabase(replace(sourceDbRegex, tableInfo.getDatabase()));
			}
			
			// Mapping source tables are "db.table": rewrite only the database part.
			for (TableMapping tableMapping : generate.getTableMappings()) {
				String[] parts = tableMapping.getSourceTable().split("\\.", 2);
				tableMapping.setSourceTable(String.format("%s.%s", replace(sourceDbRegex, parts[0]), parts[1]));
			}
		}
		
		if (Objects.nonNull(sourceTbRegex)) {
			// Rewrite only the table part of the mapping's source table.
			for (TableMapping tableMapping : generate.getTableMappings()) {
				String[] parts = tableMapping.getSourceTable().split("\\.", 2);
				tableMapping.setSourceTable(String.format("%s.%s", parts[0], replace(sourceTbRegex, parts[1])));
			}
		}
		
		TargetConfig targetConfig = generate.getTargetConfig();
		if (Objects.nonNull(targetDbRegex)) {
			targetConfig.getDatabases().replaceAll(db -> replace(targetDbRegex, db));
			
			for (TableInfo tableInfo : targetConfig.getSelectedTables()) {
				tableInfo.setDatabase(replace(targetDbRegex, tableInfo.getDatabase()));
			}
			
			// BUGFIX: the target rule must rewrite the mapping's TARGET table
			// (the original code re-applied it to the source table).
			for (TableMapping tableMapping : generate.getTableMappings()) {
				String[] parts = tableMapping.getTargetTable().split("\\.", 2);
				tableMapping.setTargetTable(String.format("%s.%s", replace(targetDbRegex, parts[0]), parts[1]));
			}
		}
		
		if (Objects.nonNull(targetTbRegex)) {
			// BUGFIX: likewise, rewrite the table part of the TARGET table.
			for (TableMapping tableMapping : generate.getTableMappings()) {
				String[] parts = tableMapping.getTargetTable().split("\\.", 2);
				tableMapping.setTargetTable(String.format("%s.%s", parts[0], replace(targetTbRegex, parts[1])));
			}
		}
		
		// Swap in replacement default values for matching target fields.
		if (!CollectionUtils.isEmpty(defaultValues)) {
			Map<String, String> defaultReplace = defaultValues.stream()
					.collect(Collectors.toMap(DefaultValue::getFieldName, DefaultValue::getDefaultValue));
			
			for (TableMapping tableMapping : generate.getTableMappings()) {
				for (FieldMapping fieldMapping : tableMapping.getFieldMappings()) {
					// Only override fields that already carry a default and have a replacement.
					if (Objects.nonNull(fieldMapping.getDefaultValue())) {
						String defaultValue = defaultReplace.get(fieldMapping.getTargetField());
						if (Objects.nonNull(defaultValue)) {
							fieldMapping.setDefaultValue(defaultValue);
						}
					}
				}
			}
		}
		
		return generate;
	}

	/** Applies the rule's regex/replacement pair to the input via {@link String#replaceAll}. */
	private static String replace(ReplaceRegex rule, String input) {
		return input.replaceAll(rule.getRegex(), rule.getValue());
	}

	/**
	 * Produces the "tables involved in this task" view by re-parsing the task's
	 * stored template and dispatching on its source→sink channel.
	 *
	 * @param task the persisted task
	 * @return the populated table listing; empty for unhandled channels
	 */
	public static ShowTaskTables toTableMappings(SyncTask task) {
		ShowTaskTables taskTables = new ShowTaskTables();
		JsonNode template = JsonUtils.getJsonNode(task.getTemplateContent().getBytes());
		SyncType channel = extracted(template);
		switch (channel) {
			case MYSQL_KAFKA:
				mysqlToKafka(taskTables, template);
				break;
			case TIDB_STARROCKS:
				tidbToStarrocks(taskTables, template);
				break;
			case MYSQL_STARROCKS:
				mysqlToStarrocks(taskTables, template);
				break;
			case KAFKA_STARROCKS:
				kafkaToStarrocks(task.getSyncConfigId(), taskTables, template);
				break;
			default:
				// TIDB_KAFKA and any other channel: nothing to show.
				break;
		}
		return taskTables;
	}

	/**
	 * Fills the table view for a mysql → starrocks task: one source row per
	 * "db.table" entry in table-names, plus the single sink database/table.
	 */
	private static void mysqlToStarrocks(ShowTaskTables taskTables, JsonNode template) {
		taskTables.addSourceHeader(ShowTaskTables.MYSQL);
		for (JsonNode tableName : (ArrayNode) template.get("source").get(0).get("table-names")) {
			if (tableName.isNull()) {
				continue;
			}
			String[] parts = tableName.asText().split("\\.");
			taskTables.addSourceTable("-", parts[0], parts[1]);
		}
		
		taskTables.addSinkHeader(ShowTaskTables.MYSQL);
		JsonNode sink = template.get("sink").get(0);
		taskTables.addSinkTable("-", sink.get("database").asText(), sink.get("table").asText());
	}

	/**
	 * Fills the table view for a kafka → starrocks task.
	 *
	 * <p>Each {@code table_schema_includes} entry is split on dots and segments
	 * [1]/[2] are taken as database/table (segment [0] is skipped — presumably
	 * the topic version prefix; confirm against the topic builder). Exact entries
	 * are matched directly against the persisted table mappings; entries ending
	 * in {@code *} are treated as regex patterns matched against every mapping key.
	 *
	 * @param syncConfigId id of the owning SyncConfig whose mappings supply the sink side
	 * @param taskTables view being populated
	 * @param template parsed task template
	 */
	private static void kafkaToStarrocks(Long syncConfigId, ShowTaskTables taskTables, JsonNode template) {
		taskTables.addSourceHeader(ShowTaskTables.SOURCE_KAFKA);
		JsonNode source = template.get("source").get(0);
		String topic = source.get("topic").asText();
		ArrayNode tableNames = (ArrayNode) source.get("table_schema_includes");
		
		taskTables.addSinkHeader(ShowTaskTables.MYSQL);
		SyncConfig syncConfig = SpringUtil.getBean(SyncConfigService.class).getById(syncConfigId);
		List<TableMapping> jsonConverts = JsonUtils.fromJsonAsList(syncConfig.getTableMappings(), TableMapping.class);
		// Last mapping wins on duplicate source-table keys.
		Map<String, TableMapping> tableMappings = jsonConverts.stream()
				.collect(Collectors.toMap(TableMapping::getSourceTable, v -> v, (v1, v2) -> v2));
		for (JsonNode tableName : tableNames) {
			if (tableName.isNull()) continue;
			
			String[] sourceMapping = tableName.asText().split("\\.");
			String sourceDatabase = sourceMapping[1];
			String sourceTable = sourceMapping[2];
			String sourceKey = String.format("%s.%s", sourceDatabase, sourceTable);
			
			// Exact match: single get instead of containsKey + get.
			TableMapping exact = tableMappings.get(sourceKey);
			if (exact != null) {
				String[] targetMapping = exact.getTargetTable().split("\\.");
				taskTables.addSourceTable(topic, sourceDatabase, sourceTable);
				taskTables.addSinkTable("-", targetMapping[0], targetMapping[1]);
				continue;
			}
			
			// Wildcard entry: interpret the key as a regex and emit a row per matching mapping.
			if (sourceKey.endsWith("*")) {
				Pattern pattern = Pattern.compile(sourceKey);
				for (Map.Entry<String, TableMapping> entry : tableMappings.entrySet()) {
					if (pattern.matcher(entry.getKey()).find()) {
						String[] targetMapping = entry.getValue().getTargetTable().split("\\.");
						taskTables.addSourceTable(topic, sourceDatabase, sourceTable);
						taskTables.addSinkTable("-", targetMapping[0], targetMapping[1]);
					}
				}
			}
		}
	}

	/**
	 * Fills the table view for a tidb → starrocks task: one source
	 * database/table row and one sink database/table row.
	 */
	private static void tidbToStarrocks(ShowTaskTables taskTables, JsonNode template) {
		JsonNode source = template.get("source").get(0);
		taskTables.addSourceHeader(ShowTaskTables.MYSQL);
		taskTables.addSourceTable("-", source.get("database-name").asText(), source.get("table-name").asText());
		
		JsonNode sink = template.get("sink").get(0);
		taskTables.addSinkHeader(ShowTaskTables.MYSQL);
		taskTables.addSinkTable("-", sink.get("database").asText(), sink.get("table").asText());
	}


	/**
	 * Fills the table view for a mysql → kafka task: one source row per
	 * "db.table" entry in table-names, one sink row per reroute rule
	 * (pattern / topic / partition).
	 */
	private static void mysqlToKafka(ShowTaskTables taskTables, JsonNode template) {
		taskTables.addSourceHeader(ShowTaskTables.MYSQL);
		for (JsonNode tableName : (ArrayNode) template.get("source").get(0).get("table-names")) {
			if (tableName.isNull()) {
				continue;
			}
			String[] parts = tableName.asText().split("\\.");
			taskTables.addSourceTable("-", parts[0], parts[1]);
		}
		
		taskTables.addSinkHeader(ShowTaskTables.SINK_KAFKA);
		for (JsonNode reroute : (ArrayNode) template.get("sink").get(0).get("reroute")) {
			if (reroute.isNull()) {
				continue;
			}
			taskTables.addSinkTable(
					reroute.get("pattern").asText(),
					reroute.get("topic").asText(),
					reroute.get("partition").asText());
		}
	}
	
}
