package com.changdu.seatunnel.admin.util;

import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;

import com.changdu.seatunnel.admin.config.enums.StartModeEnum;
import com.changdu.seatunnel.admin.entity.DataInstance;
import com.changdu.seatunnel.admin.pojo.FieldMapping;
import com.changdu.seatunnel.admin.pojo.JobParams;
import com.changdu.seatunnel.admin.pojo.SourceConfig;
import com.changdu.seatunnel.admin.pojo.SyncConfigGenerate;
import com.changdu.seatunnel.admin.pojo.TableInfo;
import com.changdu.seatunnel.admin.pojo.TableMapping;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mysql.cj.MysqlType;

/**
 * Generates SeaTunnel job configurations for Kafka (debezium_json) to StarRocks
 * synchronization tasks by filling {@code {{...}}} placeholders in raw JSON templates.
 *
 * <p>NOTE: the class name keeps the historical typo ("Templtes") because callers
 * reference it by name; renaming would be a breaking change.
 */
public class KafkaToStarrocksTempltes extends BaseTemplate {

	/** Number of partitions created per Kafka topic. */
	public static final Integer TOPIC_OF_PARTITION = 3;
	/** Maximum number of tables allowed per partition. */
	public static final Integer TABLE_OF_PARTITION = 10;
	/** Name of the Kafka sub-task schema template (name keeps historical typo). */
	public static final String SAFKA_SUB_SCHEMA = "KafkaTableSchema";

	/**
	 * Creates the main job template by filling the placeholders in the supplied
	 * template text. Expected template shape (sample; NOTE(review): the hosts and
	 * credentials below look like real values — confirm they are placeholders and
	 * not committed secrets):
	 * {
	    "env": {
	        "parallelism": {{parallelism}},
	        "job.mode": "STREAMING",
	        "job.name": "kafka=>starrocks:{{source_database}}",
	        "job.retry.times": "64",
	        "job.retry.interval.seconds": "300",
	        "checkpoint.interval": "600000",
	        "checkpoint.timeout": "600000"
	    },
	    "source": [
	        {
	            "plugin_name": "Kafka",
	            "schema_dynamic": "true",       // marks a dynamic template; non-dynamic templates are still processed as static ones
	            "bootstrap.servers": "ckafka-b4bb543r.ap-hongkong.ckafka.tencentcloudmq.com:50009",
	            "topic": "{{topic}}",	
	            "consumer.queue.size": 10240,
	            "consumer.group": "{{topic}}",
	            "start_mode": "earliest",
	            "kafka.config": {
	                "request.timeout.ms": "240000",
	                "max.poll.records": "12040",
	                "poll.timeout": "180000",
	                "auto.commit.interval.ms": "3000",
	                "max.poll.interval.ms": "600000",
	                "fetch.max.wait.ms": "30000",
	                "socket.connection.setup.timeout.max.ms": "90000",
	                "heartbeat.interval.ms": "3000",
	                "session.timeout.ms": "300000",
	                "security.protocol": "SASL_PLAINTEXT",
	                "sasl.mechanism": "PLAIN",
	                "sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"ckafka-b4bb543r#sttest\" password=\"heidisi@isus\";"
	            },
	            "format_error_handle_way": "SKIP",
	            "debezium_record_include_schema": "false",
	            "debezium_schema_includes": [
	                "{{source_table_names}}"
	            ],
	            "format": "debezium_json"
	        }
	    ],
	    "transform": [
	        {
	            "plugin_name": "Sql",
	            "schema_dynamic": "true",       // marks dynamic parsing
	            "query": "SELECT * from source_table"
	        }
	    ],
	    "sink": [
	        {
	            "plugin_name": "StarRocks",
	            "schema_dynamic": "true",
	            "data_save_mode": "APPEND_DATA",
	            "batch_max_rows": 40960,
	            "batch_max_bytes": "100663296",
	            "max_retries": "17280",
	            "max_retry_backoff_ms": 5000,
	            "retry_backoff_multiplier_ms": 10000,
	            "nodeUrls": [
	                "idc-starrocks-proxysql.changdu.vip:18030"
	            ],
	            "base-url": "jdbc:mysql://idc-starrocks-proxysql.changdu.vip:19030?useSSL=false",
	            "username": "starrocks_insecter",
	            "password": "2zZ@wIqzo@Mi9",
	            "enable_upsert_delete": "true"
	        }
	    ]
	}
	 * @param targetInstance target StarRocks instance (host/port/credentials)
	 * @param database source database name
	 * @param template raw template text containing the {{...}} placeholders
	 * @param config sync generation config (source/target/kafka/table mappings)
	 * @return the filled template parsed as a JSON tree
	 */
	public static JsonNode createMainTemplate(DataInstance targetInstance, String database, 
			String template, SyncConfigGenerate config) {
		SourceConfig sourceConfig = config.getSourceConfig();
		TableInfo targetTableInfo = config.getTargetConfig().getSelectedTables().getFirst();
		// An "aggregate" task consumes a user-supplied Kafka topic instead of the
		// per-database convention topic.
		boolean aggregateTask = config.kafkaConfigNotEmpty();
		
		Set<String> mappingTables = config.getTableMappings().stream()
				.map(TableMapping::getSourceTable)
				.collect(Collectors.toSet());
		
		// Aggregate tasks include every selected table; otherwise only tables with
		// an explicit mapping for this database are included.
		List<String> tableNames = sourceConfig.getSelectedTables().stream()
					.filter(t -> aggregateTask || mappingTables.contains(String.format("%s.%s", database, t.getName())))
					.map(t -> String.format("%s.%s.%s", sourceConfig.getDataCenter(), t.getDatabase(), buildPatternPath(t)))
					.collect(Collectors.toList());
		String topic = aggregateTask 
				? config.getKafkaConfig().getKafkaTopic()
				: String.format("%s.%s.%s_%s", sourceConfig.getDataCenter(), sourceConfig.getInstance(), database, SyncConfigUtils.TOPIC_VERSION);
		// Consumer group: the explicitly configured one, or "<target-dc>.<topic>".
		String consumer = aggregateTask && StringUtils.isNoneBlank(config.getKafkaConfig().getKafkaConsumerGroup())
				? config.getKafkaConfig().getKafkaConsumerGroup()
				: String.format("%s.%s", config.getTargetConfig().getDataCenter(), topic);
		
		String content = template
			.replace("{{parallelism}}", aggregateTask ? String.valueOf(config.getKafkaConfig().getKafkaPartition()) : String.valueOf(partitionOfNum(tableNames.size())))
			.replace("{{source_dc_code}}", sourceConfig.getDataCenter())
			.replace("{{source_database}}", aggregateTask ? config.getKafkaConfig().getKafkaTopic() : database)
			.replace("{{topic}}", topic)
			.replace("{{topic_consumer}}", consumer)
			// Placeholder is quoted in the template; the replacement is a JSON array.
			.replace("\"{{source_table_names}}\"", JsonUtils.toJson(tableNames))
			.replace("{{target_regist_host}}", toString(targetInstance.getRegistHost(), ""))
			.replace("{{target_host}}", toString(targetInstance.getHost(), ""))
			.replace("{{target_port}}", toString(targetInstance.getPort(), ""))
			.replace("{{target_username}}", toString(targetInstance.getUsername(), ""))
			.replace("{{target_password}}", toString(targetInstance.getPassword(), ""))
			.replace("{{target_database}}", targetTableInfo.getDatabase())
			.replace("{{target_table}}", targetTableInfo.getName());
		// Encode explicitly as UTF-8; the no-arg getBytes() uses the platform charset.
		return JsonUtils.getJsonNode(content.getBytes(StandardCharsets.UTF_8));
	}

	/**
	 * Creates the per-table sub-task template. Expected template shape (sample):
	 * {
	    "tablePath": "video-tke.short_video.account_extend",
	    "source": {
	        "plugin_name": "Kafka",
	        "topic": "video-tke.video-en-mysql-slave.short_video_v2",
	        "partition": 0,
	        "schema": {
	            "fields": {
	                "AccountId": "bigint",
	                "FontSize": "double",
	                "CreatePasswordTime": "timestamp_s",
	                "ThirdLoginTime": "timestamp_s",
	                "UpdateTime": "timestamp_s",
	                "CreateTime": "timestamp_s"
	            }
	        },
	        "format": "debezium_json"
	    },
	    "transform": {
	        "plugin_name": "Sql",
	        "query": "select  CreateTime as dt, AccountId, FontSize, CreatePasswordTime, ThirdLoginTime, UpdateTime, CreateTime, CURRENT_TIMESTAMP as sr_createtime, CURRENT_TIMESTAMP as sr_updatetime from source_table"
	    },
	    "sink": {
	        "plugin_name": "StarRocks",
	        "table": "ods_tidb_short_video_account_extend"
	    }
	}
	 * @param targetInstance target StarRocks instance
	 * @param tableMapping mapping of one source table to its target table/fields ("db.table" format)
	 * @param template raw sub-task template text
	 * @param config sync generation config
	 * @return the filled template with ignore-transform rules applied
	 * @throws NoSuchElementException if no selected source table matches the mapping
	 */
	public static JsonNode createSubTemplate(DataInstance targetInstance, TableMapping tableMapping, String template, SyncConfigGenerate config) {
		SourceConfig sourceConfig = config.getSourceConfig();
		TableInfo tableInfo = sourceConfig.getSelectedTables().stream()
				.filter(t -> String.format("%s.%s", t.getDatabase(), t.getName()).equals(tableMapping.getSourceTable()))
				.findFirst()
				.orElseThrow(() -> new NoSuchElementException(
						"No selected source table matches mapping: " + tableMapping.getSourceTable()));
		
		String tablePath = String.format("%s.%s.%s", sourceConfig.getDataCenter(), tableInfo.getDatabase(), buildPatternPath(tableInfo));
		// Mapping identifiers are "db.table"; split out the database/table parts.
		String sourceDatabase = tableMapping.getSourceTable().substring(0, tableMapping.getSourceTable().indexOf("."));
		String topic = Objects.nonNull(config.getKafkaConfig()) && StringUtils.isNoneBlank(config.getKafkaConfig().getKafkaTopic())
				? config.getKafkaConfig().getKafkaTopic()
				: String.format("%s.%s.%s_%s", sourceConfig.getDataCenter(), sourceConfig.getInstance(), sourceDatabase, SyncConfigUtils.TOPIC_VERSION);
		String targetDatabase = tableMapping.getTargetTable().substring(0, tableMapping.getTargetTable().indexOf("."));
		String targetTable = tableMapping.getTargetTable().substring(tableMapping.getTargetTable().indexOf(".") + 1);
		
		// Schema mapping (source field -> kafka field type); insertion order preserved.
		Map<String, String> mappings = new LinkedHashMap<>();
		for (FieldMapping fieldMapping : tableMapping.getFieldMappings()) {
			if (StringUtils.isBlank(fieldMapping.getSourceField())) continue;
			
			mappings.put(fieldMapping.getSourceField(), parseKafkaFieldType(fieldMapping));
		}
		
		// SQL projection for the transform step.
		StringBuilder sqlBuilder = buildTransform(tableMapping);
		String content = template
				.replace("{{source_table_path}}", tablePath)
				.replace("{{topic}}", topic)
				.replace("\"{{source_field_mappings}}\"", JsonUtils.toJson(mappings))
				.replace("{{transform_sql}}", sqlBuilder.toString())
				.replace("{{target_database}}", targetDatabase)
				.replace("{{target_table}}", targetTable);
		// Encode explicitly as UTF-8; the no-arg getBytes() uses the platform charset.
		return transformIgnore(tableMapping, JsonUtils.getJsonNode(content.getBytes(StandardCharsets.UTF_8)));
	}

	/**
	 * Builds the table part of a topic/table path. Regex table names are truncated
	 * at the first regex metacharacter and suffixed with "*"; plain names pass through.
	 */
	private static String buildPatternPath(TableInfo tableInfo) {
		if (tableInfo.isRegex()) {
			String tableName = tableInfo.getName();
			return tableName.substring(0, DatabaseUtil.findRegexIndex(tableName)).concat("*");
		}
		return tableInfo.getName();
	}

	/**
	 * Builds the comma-separated SQL projection list for the transform step.
	 * Skips mappings with neither a source field nor a default value, and mappings
	 * without a target field.
	 *
	 * @param tableMapping the table mapping whose field mappings are rendered
	 * @return the projection list (may be empty if every mapping was skipped)
	 */
	public static StringBuilder buildTransform(TableMapping tableMapping) {
		StringBuilder sqlBuilder = new StringBuilder();
		for (FieldMapping fieldMapping : tableMapping.getFieldMappings()) {
			// Need at least one of source field / default value to render anything.
			if (StringUtils.isBlank(fieldMapping.getSourceField()) 
					&& StringUtils.isBlank(fieldMapping.getDefaultValue())) continue;
			
			if (StringUtils.isBlank(fieldMapping.getTargetField())) continue;
			
			sqlBuilder.append(parseKafkaSqlMapping(fieldMapping)).append(", ");
		}
		// Drop the trailing comma (the trailing space is preserved, matching the
		// original output). Guard the empty case: deleteCharAt(-2) would throw.
		if (sqlBuilder.length() >= 2) {
			sqlBuilder.deleteCharAt(sqlBuilder.length() - 2);
		}
		return sqlBuilder;
	}	
	
	/**
	 * Renders one SQL projection term for a field mapping.
	 * @param fieldMapping field mapping; caller guarantees targetField is non-blank
	 *                     and at least one of sourceField/defaultValue is non-blank
	 * @return "field", "default as target" or "source as target"
	 */
	private static String parseKafkaSqlMapping(FieldMapping fieldMapping) {
		String sourceField = fieldMapping.getSourceField();
		String targetField = fieldMapping.getTargetField();
		
		// Identical source and target: emit the bare field name. Null-safe compare:
		// sourceField may be null/blank when only a default value is supplied.
		if (StringUtils.equals(sourceField, targetField)) return sourceField;
		
		// Manually entered default value wins over the source field.
		if (StringUtils.isNoneBlank(fieldMapping.getDefaultValue())) return String.format("%s as %s", fieldMapping.getDefaultValue(), targetField);
		
		// Differing source/target: emit an alias.
		return String.format("%s as %s", sourceField, targetField);
	}

	/**
	 * Maps a MySQL column type to the Kafka schema field type used in the template.
	 * Unrecognized types fall back to "string".
	 * @param fieldMapping mapping carrying the MySQL source type name
	 * @return the Kafka schema type name
	 */
	private static String parseKafkaFieldType(FieldMapping fieldMapping) {
		MysqlType mysqlType = MysqlType.getByName(fieldMapping.getSourceType());
		switch (mysqlType) {
			case INT, SMALLINT, BIGINT -> {
				return "bigint";
			}
			case TINYINT -> {
				return "tinyint";
			}
			case FLOAT -> {
				return "float";
			}
			// NOTE(review): DECIMAL is widened to double here — confirm downstream
			// tolerates the potential precision loss.
			case DOUBLE, DECIMAL -> {
				return "double";
			}
			case BINARY, VARBINARY -> {
				return "bytes";
			}
			case DATETIME, TIMESTAMP -> {
				return "timestamp_s";
			}
			case VARCHAR, CHAR, TEXT, TINYTEXT, MEDIUMTEXT, LONGTEXT, BLOB, TINYBLOB, MEDIUMBLOB  -> {
				return "string";
			}
			case JSON -> {
				return "json";
			}
			case BOOLEAN -> {
				return "boolean";
			}
			case DATE -> {
				return "date";
			}
			case TIME -> {
				return "time";
			}
			default -> {
				return "string";
			}
		}
	}

	/**
	 * Computes the partition count for a topic: the default TOPIC_OF_PARTITION for
	 * up to TOPIC_OF_PARTITION * TABLE_OF_PARTITION tables, otherwise one partition
	 * per TABLE_OF_PARTITION tables, rounded up.
	 * @param size number of tables on the topic
	 * @return the partition count
	 */
	public static int partitionOfNum(int size) {
		// Integer ceiling division replaces the previous BigDecimal/Math.ceil dance;
		// same result for all non-negative sizes.
		return size > (TABLE_OF_PARTITION * TOPIC_OF_PARTITION) 
				? (size + TABLE_OF_PARTITION - 1) / TABLE_OF_PARTITION 
				: TOPIC_OF_PARTITION;
	}

	/**
	 * Rewrites the Kafka source section of an existing template according to the
	 * job's consume-mode parameters.
	 * @param templateContent existing template JSON text
	 * @param jobParam runtime job parameters (consume mode / start timestamp)
	 * @return the updated template serialized back to JSON
	 */
	public static String reCreateTemplate(String templateContent, JobParams jobParam) {
		StartModeEnum startMode = jobParam.getConsumeMode();
		
		JsonNode template = JsonUtils.fromJson(templateContent, JsonNode.class);
		ObjectNode source = (ObjectNode) template.get("source").get(0);
		if (StartModeEnum.EARLIEST == startMode || StartModeEnum.LATEST == startMode) {
			source.put("start_mode", startMode.getMode());
		} else if (StartModeEnum.TIMESTAMP == startMode) {
			source.put("start_mode", startMode.getMode());
			// TIMESTAMP mode additionally needs the offset position to start from.
			source.put("start_mode.timestamp", jobParam.getStartTime());
		}
		// A null/unknown mode leaves the template's existing start_mode untouched.
		return JsonUtils.toJson(template);
	}
	
}
