package com.flink.hbase.kafka2elasticsearch.timepartition;

import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;

import java.io.Serializable;
import java.util.*;

/**
 * Immutable configuration for the Kafka-to-Elasticsearch time-partition job.
 *
 * <p>Instances are created through the nested {@link Builder} (via {@link #builder()})
 * or from a flat key/value map via {@link #fromMap(Map)}. The private constructor takes
 * defensive copies of every mutable builder field, so reusing or mutating a
 * {@code Builder} after {@code build()} never affects previously built configs.
 */
public class JobConfig implements Serializable {
    private static final long serialVersionUID = 1L;
    
    // Kafka settings
    private final String kafkaBootstrapServers;
    private final List<String> kafkaTopics;
    private final String kafkaGroupId;
    private final String kafkaOffsetReset;
    private final Properties kafkaProperties;
    
    // Elasticsearch settings
    private final Map<String, Object> esConfig;
    private final String indexName;
    private final Map<String, String> indexSettings;
    private final Map<String, Object> indexMappings;
    private final String documentIdField;
    private final boolean useAlias;
    
    // Time-handling settings
    private final String[] timeFields;
    private final String defaultTimeZone;
    private final boolean useEventTime;
    private final boolean useProcessingTime;
    
    // Parallelism settings
    private final int globalParallelism;
    private final int kafkaParallelism;
    private final int processingParallelism;
    private final int esParallelism;
    
    // Batching settings
    private final int esBatchSize;
    private final long esFlushInterval;
    
    // Checkpoint settings
    private final boolean checkpointEnabled;
    private final long checkpointInterval;
    private final long checkpointMinPause;
    private final long checkpointTimeout;
    private final StateBackend stateBackend;
    
    // Restart-strategy settings
    private final int restartAttempts;
    private final long restartDelay;
    
    private JobConfig(Builder builder) {
        this.kafkaBootstrapServers = builder.kafkaBootstrapServers;
        // Defensive copies throughout: the builder's collections/arrays stay mutable
        // after build(), and sharing them would let later builder mutations leak
        // into this supposedly immutable config.
        this.kafkaTopics = new ArrayList<>(builder.kafkaTopics);
        this.kafkaGroupId = builder.kafkaGroupId;
        this.kafkaOffsetReset = builder.kafkaOffsetReset;
        Properties propsCopy = new Properties();
        propsCopy.putAll(builder.kafkaProperties);
        this.kafkaProperties = propsCopy;
        
        this.esConfig = new HashMap<>(builder.esConfig);
        this.indexName = builder.indexName;
        this.indexSettings = new HashMap<>(builder.indexSettings);
        this.indexMappings = new HashMap<>(builder.indexMappings);
        this.documentIdField = builder.documentIdField;
        this.useAlias = builder.useAlias;
        
        this.timeFields = builder.timeFields.clone();
        this.defaultTimeZone = builder.defaultTimeZone;
        this.useEventTime = builder.useEventTime;
        this.useProcessingTime = builder.useProcessingTime;
        
        this.globalParallelism = builder.globalParallelism;
        this.kafkaParallelism = builder.kafkaParallelism;
        this.processingParallelism = builder.processingParallelism;
        this.esParallelism = builder.esParallelism;
        
        this.esBatchSize = builder.esBatchSize;
        this.esFlushInterval = builder.esFlushInterval;
        
        this.checkpointEnabled = builder.checkpointEnabled;
        this.checkpointInterval = builder.checkpointInterval;
        this.checkpointMinPause = builder.checkpointMinPause;
        this.checkpointTimeout = builder.checkpointTimeout;
        this.stateBackend = builder.stateBackend;
        
        this.restartAttempts = builder.restartAttempts;
        this.restartDelay = builder.restartDelay;
    }
    
    // Getters
    public String getKafkaBootstrapServers() { return kafkaBootstrapServers; }
    public List<String> getKafkaTopics() { return kafkaTopics; }
    public String getKafkaGroupId() { return kafkaGroupId; }
    public String getKafkaOffsetReset() { return kafkaOffsetReset; }
    public Properties getKafkaProperties() { return kafkaProperties; }
    
    public Map<String, Object> getEsConfig() { return esConfig; }
    public String getIndexName() { return indexName; }
    public Map<String, String> getIndexSettings() { return indexSettings; }
    public Map<String, Object> getIndexMappings() { return indexMappings; }
    public String getDocumentIdField() { return documentIdField; }
    public boolean isUseAlias() { return useAlias; }
    
    /** Returns a copy of the time-field names; mutating it does not affect this config. */
    public String[] getTimeFields() { return timeFields.clone(); }
    public String getDefaultTimeZone() { return defaultTimeZone; }
    public boolean isUseEventTime() { return useEventTime; }
    public boolean isUseProcessingTime() { return useProcessingTime; }
    
    public int getGlobalParallelism() { return globalParallelism; }
    public int getKafkaParallelism() { return kafkaParallelism; }
    public int getProcessingParallelism() { return processingParallelism; }
    public int getEsParallelism() { return esParallelism; }
    
    public int getEsBatchSize() { return esBatchSize; }
    public long getEsFlushInterval() { return esFlushInterval; }
    
    public boolean isCheckpointEnabled() { return checkpointEnabled; }
    public long getCheckpointInterval() { return checkpointInterval; }
    public long getCheckpointMinPause() { return checkpointMinPause; }
    public long getCheckpointTimeout() { return checkpointTimeout; }
    public StateBackend getStateBackend() { return stateBackend; }
    
    public int getRestartAttempts() { return restartAttempts; }
    public long getRestartDelay() { return restartDelay; }
    
    /**
     * Fluent builder for {@link JobConfig}. All setters return {@code this}; defaults
     * target a local single-node development setup.
     */
    public static class Builder {
        // Kafka settings
        private String kafkaBootstrapServers = "localhost:9092";
        private List<String> kafkaTopics = Arrays.asList("test-topic");
        private String kafkaGroupId = "es-consumer-group";
        private String kafkaOffsetReset = "latest";
        private Properties kafkaProperties = new Properties();
        
        // Elasticsearch settings
        private Map<String, Object> esConfig = new HashMap<>();
        private String indexName = "default-index";
        private Map<String, String> indexSettings = new HashMap<>();
        private Map<String, Object> indexMappings = new HashMap<>();
        private String documentIdField;
        private boolean useAlias = true;
        
        // Time-handling settings
        private String[] timeFields = {"timestamp", "create_time", "update_time", "event_time"};
        private String defaultTimeZone = "Asia/Shanghai";
        private boolean useEventTime = true;
        private boolean useProcessingTime = false;
        
        // Parallelism settings
        private int globalParallelism = 1;
        private int kafkaParallelism = 1;
        private int processingParallelism = 1;
        private int esParallelism = 1;
        
        // Batching settings
        private int esBatchSize = 100;
        private long esFlushInterval = 5000;
        
        // Checkpoint settings
        private boolean checkpointEnabled = true;
        private long checkpointInterval = 60000;
        private long checkpointMinPause = 5000;
        private long checkpointTimeout = 600000;
        private StateBackend stateBackend;
        
        // Restart-strategy settings
        private int restartAttempts = 3;
        private long restartDelay = 10000;
        
        public Builder() {
            // Default ES client settings (timeouts in milliseconds)
            esConfig.put("hosts", "localhost:9200");
            esConfig.put("scheme", "http");
            esConfig.put("connect.timeout", 5000);
            esConfig.put("socket.timeout", 60000);
            esConfig.put("connection.request.timeout", 5000);
            
            // Default index settings
            indexSettings.put("index.number_of_shards", "3");
            indexSettings.put("index.number_of_replicas", "1");
            indexSettings.put("index.refresh_interval", "30s");
            indexSettings.put("index.max_result_window", "100000");
            
            // Default Kafka consumer properties.
            // NOTE(review): enable.auto.commit=true lets the Kafka client commit
            // offsets independently of Flink checkpoints; with checkpointing enabled
            // Flink normally manages offsets itself — confirm this default is intended.
            kafkaProperties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            kafkaProperties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            kafkaProperties.put("enable.auto.commit", "true");
            kafkaProperties.put("auto.commit.interval.ms", "1000");
        }
        
        // Kafka setters
        public Builder kafkaBootstrapServers(String kafkaBootstrapServers) {
            this.kafkaBootstrapServers = kafkaBootstrapServers;
            return this;
        }
        
        public Builder kafkaTopics(List<String> kafkaTopics) {
            this.kafkaTopics = kafkaTopics;
            return this;
        }
        
        public Builder kafkaGroupId(String kafkaGroupId) {
            this.kafkaGroupId = kafkaGroupId;
            return this;
        }
        
        public Builder kafkaOffsetReset(String kafkaOffsetReset) {
            this.kafkaOffsetReset = kafkaOffsetReset;
            return this;
        }
        
        public Builder kafkaProperties(Properties kafkaProperties) {
            this.kafkaProperties = kafkaProperties;
            return this;
        }
        
        // Elasticsearch setters
        public Builder esHosts(String esHosts) {
            this.esConfig.put("hosts", esHosts);
            return this;
        }
        
        public Builder esUsername(String username) {
            this.esConfig.put("username", username);
            return this;
        }
        
        public Builder esPassword(String password) {
            this.esConfig.put("password", password);
            return this;
        }
        
        /** Merges the given entries into the ES config (existing keys are overwritten). */
        public Builder esConfig(Map<String, Object> esConfig) {
            this.esConfig.putAll(esConfig);
            return this;
        }
        
        public Builder indexName(String indexName) {
            this.indexName = indexName;
            return this;
        }
        
        /** Merges the given entries into the index settings (existing keys are overwritten). */
        public Builder indexSettings(Map<String, String> indexSettings) {
            this.indexSettings.putAll(indexSettings);
            return this;
        }
        
        /** Merges the given entries into the index mappings (existing keys are overwritten). */
        public Builder indexMappings(Map<String, Object> indexMappings) {
            this.indexMappings.putAll(indexMappings);
            return this;
        }
        
        public Builder documentIdField(String documentIdField) {
            this.documentIdField = documentIdField;
            return this;
        }
        
        public Builder useAlias(boolean useAlias) {
            this.useAlias = useAlias;
            return this;
        }
        
        // Time-handling setters
        public Builder timeFields(String... timeFields) {
            // Copy so later mutation of the caller's array cannot change this builder.
            this.timeFields = timeFields.clone();
            return this;
        }
        
        public Builder defaultTimeZone(String defaultTimeZone) {
            this.defaultTimeZone = defaultTimeZone;
            return this;
        }
        
        public Builder useEventTime(boolean useEventTime) {
            this.useEventTime = useEventTime;
            return this;
        }
        
        public Builder useProcessingTime(boolean useProcessingTime) {
            this.useProcessingTime = useProcessingTime;
            return this;
        }
        
        // Parallelism setters
        public Builder globalParallelism(int globalParallelism) {
            this.globalParallelism = globalParallelism;
            return this;
        }
        
        public Builder kafkaParallelism(int kafkaParallelism) {
            this.kafkaParallelism = kafkaParallelism;
            return this;
        }
        
        public Builder processingParallelism(int processingParallelism) {
            this.processingParallelism = processingParallelism;
            return this;
        }
        
        public Builder esParallelism(int esParallelism) {
            this.esParallelism = esParallelism;
            return this;
        }
        
        // Batching setters
        public Builder esBatchSize(int esBatchSize) {
            this.esBatchSize = esBatchSize;
            return this;
        }
        
        public Builder esFlushInterval(long esFlushInterval) {
            this.esFlushInterval = esFlushInterval;
            return this;
        }
        
        // Checkpoint setters
        public Builder checkpointEnabled(boolean checkpointEnabled) {
            this.checkpointEnabled = checkpointEnabled;
            return this;
        }
        
        public Builder checkpointInterval(long checkpointInterval) {
            this.checkpointInterval = checkpointInterval;
            return this;
        }
        
        public Builder checkpointMinPause(long checkpointMinPause) {
            this.checkpointMinPause = checkpointMinPause;
            return this;
        }
        
        public Builder checkpointTimeout(long checkpointTimeout) {
            this.checkpointTimeout = checkpointTimeout;
            return this;
        }
        
        /**
         * Selects a state backend by name: {@code "memory"} or {@code "rocksdb"}
         * (case-insensitive). The RocksDB backend writes checkpoints to a fixed
         * local path; use {@link #stateBackend(StateBackend)} for custom locations.
         *
         * @throws IllegalArgumentException for any other backend name
         */
        public Builder stateBackend(String stateBackendType) {
            switch (stateBackendType.toLowerCase()) {
                case "memory":
                    this.stateBackend = new MemoryStateBackend();
                    break;
                case "rocksdb":
                    this.stateBackend = new RocksDBStateBackend("file:///tmp/flink-checkpoints");
                    break;
                default:
                    throw new IllegalArgumentException("Unknown state backend: " + stateBackendType);
            }
            return this;
        }
        
        public Builder stateBackend(StateBackend stateBackend) {
            this.stateBackend = stateBackend;
            return this;
        }
        
        // Restart-strategy setters
        public Builder restartAttempts(int restartAttempts) {
            this.restartAttempts = restartAttempts;
            return this;
        }
        
        public Builder restartDelay(long restartDelay) {
            this.restartDelay = restartDelay;
            return this;
        }
        
        /**
         * Validates the accumulated settings and builds an immutable {@link JobConfig}.
         *
         * @throws IllegalArgumentException if any setting is missing or out of range
         */
        public JobConfig build() {
            validate();
            return new JobConfig(this);
        }
        
        private void validate() {
            if (kafkaBootstrapServers == null || kafkaBootstrapServers.trim().isEmpty()) {
                throw new IllegalArgumentException("Kafka bootstrap servers cannot be null or empty");
            }
            if (kafkaTopics == null || kafkaTopics.isEmpty()) {
                throw new IllegalArgumentException("Kafka topics cannot be null or empty");
            }
            if (kafkaGroupId == null || kafkaGroupId.trim().isEmpty()) {
                throw new IllegalArgumentException("Kafka group ID cannot be null or empty");
            }
            if (indexName == null || indexName.trim().isEmpty()) {
                throw new IllegalArgumentException("Index name cannot be null or empty");
            }
            if (globalParallelism < 1) {
                throw new IllegalArgumentException("Global parallelism must be >= 1");
            }
            if (kafkaParallelism < 1) {
                throw new IllegalArgumentException("Kafka parallelism must be >= 1");
            }
            if (processingParallelism < 1) {
                throw new IllegalArgumentException("Processing parallelism must be >= 1");
            }
            if (esParallelism < 1) {
                throw new IllegalArgumentException("ES parallelism must be >= 1");
            }
            if (esBatchSize < 1) {
                throw new IllegalArgumentException("ES batch size must be >= 1");
            }
            if (esFlushInterval < 1000) {
                throw new IllegalArgumentException("ES flush interval must be >= 1000ms");
            }
            if (restartAttempts < 0) {
                throw new IllegalArgumentException("Restart attempts must be >= 0");
            }
            // Checkpoint timings only matter when checkpointing is on.
            if (checkpointEnabled) {
                if (checkpointInterval < 1) {
                    throw new IllegalArgumentException("Checkpoint interval must be >= 1ms");
                }
                if (checkpointMinPause < 0) {
                    throw new IllegalArgumentException("Checkpoint min pause must be >= 0ms");
                }
                if (checkpointTimeout < 1) {
                    throw new IllegalArgumentException("Checkpoint timeout must be >= 1ms");
                }
            }
        }
    }
    
    /**
     * Creates a new builder with development-friendly defaults.
     */
    public static Builder builder() {
        return new Builder();
    }
    
    /**
     * Creates a builder pre-populated from a flat key/value map.
     *
     * <p>Numeric and boolean values are converted leniently: any {@link Number}
     * (e.g. an {@code Integer} where a {@code long} is needed) or a parseable
     * {@link String} is accepted, instead of failing with a
     * {@code ClassCastException} on an exact-type mismatch. {@code kafka.topics}
     * may be either a {@code List<String>} or a comma-separated string.
     *
     * @param config flat configuration map; unknown keys are ignored
     * @return a builder with the recognized keys applied
     */
    public static Builder fromMap(Map<String, Object> config) {
        Builder builder = builder();
        
        // Kafka settings
        if (config.containsKey("kafka.bootstrap.servers")) {
            builder.kafkaBootstrapServers((String) config.get("kafka.bootstrap.servers"));
        }
        if (config.containsKey("kafka.topics")) {
            Object topics = config.get("kafka.topics");
            if (topics instanceof String) {
                builder.kafkaTopics(Arrays.asList(((String) topics).split("\\s*,\\s*")));
            } else {
                @SuppressWarnings("unchecked")
                List<String> topicList = (List<String>) topics;
                builder.kafkaTopics(topicList);
            }
        }
        if (config.containsKey("kafka.group.id")) {
            builder.kafkaGroupId((String) config.get("kafka.group.id"));
        }
        if (config.containsKey("kafka.offset.reset")) {
            builder.kafkaOffsetReset((String) config.get("kafka.offset.reset"));
        }
        
        // Elasticsearch settings
        if (config.containsKey("es.hosts")) {
            builder.esHosts((String) config.get("es.hosts"));
        }
        if (config.containsKey("es.username")) {
            builder.esUsername((String) config.get("es.username"));
        }
        if (config.containsKey("es.password")) {
            builder.esPassword((String) config.get("es.password"));
        }
        if (config.containsKey("index.name")) {
            builder.indexName((String) config.get("index.name"));
        }
        if (config.containsKey("document.id.field")) {
            builder.documentIdField((String) config.get("document.id.field"));
        }
        
        // Time-handling settings
        if (config.containsKey("time.fields")) {
            String[] timeFields = ((String) config.get("time.fields")).split(",");
            builder.timeFields(timeFields);
        }
        if (config.containsKey("default.timezone")) {
            builder.defaultTimeZone((String) config.get("default.timezone"));
        }
        
        // Parallelism settings
        if (config.containsKey("parallelism.global")) {
            builder.globalParallelism(intValue(config.get("parallelism.global")));
        }
        if (config.containsKey("parallelism.kafka")) {
            builder.kafkaParallelism(intValue(config.get("parallelism.kafka")));
        }
        if (config.containsKey("parallelism.processing")) {
            builder.processingParallelism(intValue(config.get("parallelism.processing")));
        }
        if (config.containsKey("parallelism.es")) {
            builder.esParallelism(intValue(config.get("parallelism.es")));
        }
        
        // Batching settings
        if (config.containsKey("es.batch.size")) {
            builder.esBatchSize(intValue(config.get("es.batch.size")));
        }
        if (config.containsKey("es.flush.interval")) {
            builder.esFlushInterval(longValue(config.get("es.flush.interval")));
        }
        
        // Checkpoint settings
        if (config.containsKey("checkpoint.enabled")) {
            builder.checkpointEnabled(booleanValue(config.get("checkpoint.enabled")));
        }
        if (config.containsKey("checkpoint.interval")) {
            builder.checkpointInterval(longValue(config.get("checkpoint.interval")));
        }
        
        return builder;
    }
    
    /** Lenient int conversion: accepts any Number or a parseable String. */
    private static int intValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).intValue();
        }
        return Integer.parseInt(String.valueOf(value).trim());
    }
    
    /** Lenient long conversion: accepts any Number or a parseable String. */
    private static long longValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).longValue();
        }
        return Long.parseLong(String.valueOf(value).trim());
    }
    
    /** Lenient boolean conversion: accepts a Boolean or a parseable String. */
    private static boolean booleanValue(Object value) {
        if (value instanceof Boolean) {
            return (Boolean) value;
        }
        return Boolean.parseBoolean(String.valueOf(value).trim());
    }
    
    @Override
    public String toString() {
        return "JobConfig{" +
                "kafkaBootstrapServers='" + kafkaBootstrapServers + '\'' +
                ", kafkaTopics=" + kafkaTopics +
                ", kafkaGroupId='" + kafkaGroupId + '\'' +
                ", indexName='" + indexName + '\'' +
                ", globalParallelism=" + globalParallelism +
                ", kafkaParallelism=" + kafkaParallelism +
                ", processingParallelism=" + processingParallelism +
                ", esParallelism=" + esParallelism +
                ", esBatchSize=" + esBatchSize +
                ", esFlushInterval=" + esFlushInterval +
                ", checkpointEnabled=" + checkpointEnabled +
                ", checkpointInterval=" + checkpointInterval +
                '}';
    }
}