package com.changdu.seatunnel.admin.util;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import com.changdu.seatunnel.admin.config.enums.StartModeEnum;
import com.changdu.seatunnel.admin.entity.DataInstance;
import com.changdu.seatunnel.admin.pojo.JobParams;
import com.changdu.seatunnel.admin.pojo.SourceConfig;
import com.changdu.seatunnel.admin.pojo.SyncConfigGenerate;
import com.changdu.seatunnel.admin.pojo.TableInfo;
import com.changdu.seatunnel.admin.pojo.TableRouter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class MysqlToKafkaTempltes extends BaseTemplate {

	/**
	 * Builds a SeaTunnel MySQL-CDC =&gt; Kafka job configuration by substituting placeholders
	 * in a JSON template. Sample template shape:
	 * <pre>
	 * {
	 *     "env": {
	 *         "parallelism": 1,
	 *         "job.mode": "STREAMING",
	 *         "job.name": "mysql=>kafka : {{dc_code}}.{{mysql_instance}}",
	 *         "job.retry.times": "256",
	 *         "job.retry.interval.seconds": "300",
	 *         "checkpoint.interval": "20000",
	 *         "checkpoint.timeout": "600000"
	 *     },
	 *     "source": [
	 *         {
	 *             "plugin_name": "MySQL-CDC",
	 *             "driver": "com.mysql.cj.jdbc.Driver",
	 *             "base-url": "jdbc:mysql://localhost:3306?useSSL=false",
	 *             "username": "seatunnel",
	 *             "password": "...",
	 *             "table-names": [
	 *                 "{{table_names}}"
	 *             ],
	 *             "server-time-zone": "UTC",
	 *             "server-timestamp-zone": "Asia/Shanghai",
	 *             "connect.timeout.ms": 300000,
	 *             "connect.max-retries": 32,
	 *             "startup.mode": "latest",
	 *             "format": "compatible_debezium_json",
	 *             "debezium": {
	 *                 "database.server.name": "{{dc_code}}",
	 *                 "key.converter.schemas.enable": false,
	 *                 "value.converter.schemas.enable": false,
	 *                 "connect.keep.alive.interval.ms": 600000,
	 *                 "max.queue.size": 20480,
	 *                 "poll.interval.ms": 100,
	 *                 "max.batch.size": 2048,
	 *                 "heartbeat.topics.prefix": "_heartbeat"
	 *             },
	 *             "exactly_once": false,
	 *             "schema-changes.enabled": true,
	 *             "snapshot.split.size": 81920
	 *         }
	 *     ],
	 *     "transform": [],
	 *     "sink": [
	 *         {
	 *             "plugin_name": "Kafka",
	 *             "bootstrap.servers": "...",
	 *             "reroute": [
	 *                 "{{table_routers}}"
	 *             ],
	 *             "kafka.config": { ... },
	 *             "semantics": "AT_LEAST_ONCE",
	 *             "format": "compatible_debezium_json"
	 *         }
	 *     ]
	 * }
	 * </pre>
	 * NOTE(review): the sample above shows placeholders such as {{dc_code}} / {{table_names}},
	 * but createTemplate() substitutes the "source_"-prefixed tokens ({{source_dc_code}},
	 * {{source_table_names}}, ...). Confirm the real template file uses the prefixed tokens.
	 *
	 * @param sourceInstance connection details (host/port/credentials) of the source MySQL instance
	 * @param template       raw JSON template text containing {{source_*}} placeholders
	 * @param config         generation config: selected tables, data-center code, optional Kafka overrides
	 * @return the fully substituted job configuration as a Jackson tree
	 */
	public static JsonNode createTemplate(DataInstance sourceInstance, String template, SyncConfigGenerate config) {
		final List<String> tableNames = new ArrayList<>();
		final List<TableRouter> tableRouters = new ArrayList<>();

		SourceConfig sourceConfig = config.getSourceConfig();
		// Group selected tables by database: each database gets its own round-robin partition cycle.
		Map<String, List<TableInfo>> databaseGroup = sourceConfig.getSelectedTables().stream()
				.collect(Collectors.groupingBy(TableInfo::getDatabase));
		for (Map.Entry<String, List<TableInfo>> entry : databaseGroup.entrySet()) {
			List<TableInfo> tables = entry.getValue();
			// Partition count: an explicit Kafka config wins; otherwise derive it from the table count.
			int totalPartition = config.kafkaConfigNotEmpty()
					? config.getKafkaConfig().getKafkaPartition()
					: KafkaToStarrocksTempltes.partitionOfNum(tables.size());
			int partition = 0;
			for (TableInfo table : tables) {
				String tableName = String.format("%s.%s", table.getDatabase(), buildPattern(table));
				String pattern = String.format("%s.%s.%s", sourceConfig.getDataCenter(), table.getDatabase(), buildPattern(table));
				String topic = config.kafkaConfigNotEmpty()
						? config.getKafkaConfig().getKafkaTopic()
						: String.format("%s.%s.%s_%s", sourceConfig.getDataCenter(), sourceConfig.getInstance(), table.getDatabase(), SyncConfigUtils.TOPIC_VERSION);

				tableNames.add(tableName);
				// Route this table's changes to the topic, cycling the partition index per database.
				tableRouters.add(new TableRouter(pattern, topic, partition));
				if (++partition >= totalPartition) {
					partition = 0;
				}
			}
		}

		// String.replace substitutes ALL occurrences, so each token only needs one pass
		// (the original code replaced {{source_dc_code}} twice — the second call was a no-op).
		String content = template
				.replace("{{source_dc_code}}", sourceConfig.getDataCenter())
				.replace("{{source_instance}}", sourceConfig.getInstance())
				.replace("{{source_host}}", toString(sourceInstance.getHost(), ""))
				.replace("{{source_port}}", toString(sourceInstance.getPort(), ""))
				.replace("{{source_username}}", toString(sourceInstance.getUsername(), ""))
				.replace("{{source_password}}", toString(sourceInstance.getPassword(), ""))
				// The quoted placeholders are replaced wholesale by JSON arrays.
				.replace("\"{{source_table_names}}\"", JsonUtils.toJson(tableNames))
				.replace("\"{{source_table_routers}}\"", JsonUtils.toJson(tableRouters));
		// Use an explicit charset: the no-arg getBytes() depends on the platform default.
		return JsonUtils.getJsonNode(content.getBytes(StandardCharsets.UTF_8));
	}

	/**
	 * Returns the table-name pattern for matching: regex names are truncated at the first
	 * regex metacharacter (per DatabaseUtil.findRegexIndex) and suffixed with "*";
	 * plain names are returned unchanged.
	 */
	private static String buildPattern(TableInfo table) {
		if (table.isRegex()) {
			String tableName = table.getName();
			return tableName.substring(0, DatabaseUtil.findRegexIndex(tableName)).concat("*");
		}
		return table.getName();
	}

	/**
	 * Rewrites the first source block of an existing template according to the job's
	 * consume/start mode (initial snapshot, latest offset, or a specific binlog position).
	 *
	 * @param templateContent existing job configuration JSON
	 * @param jobParam        runtime parameters carrying the consume mode and binlog file
	 * @return the adjusted configuration serialized back to JSON
	 */
	public static String reCreateTemplate(String templateContent, JobParams jobParam) {
		JsonNode template = JsonUtils.fromJson(templateContent, JsonNode.class);
		ObjectNode source = (ObjectNode) template.get("source").get(0);

		StartModeEnum startMode = jobParam.getConsumeMode();
		if (startMode == StartModeEnum.INITIAL || startMode == StartModeEnum.LATEST) {
			source.put("startup.mode", startMode.getMode());
		} else if (startMode == StartModeEnum.SPECIFIC) {
			source.put("startup.mode", StartModeEnum.SPECIFIC.getMode());
			source.put("startup.specific-offset.file", jobParam.getBinlogFile());
			// MySQL binlog files begin with a 4-byte magic number, so the first event
			// always sits at offset 4 — start reading the given file from its beginning.
			source.put("startup.specific-offset.pos", "4");
		}
		// Any other (or null) mode leaves the template's startup settings untouched.
		return JsonUtils.toJson(template);
	}

}
