package com.flink.hbase.kafka2elasticsearch.timepartition;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Time-partitioned Elasticsearch sink.
 *
 * <p>Routes each incoming message to a monthly index (or its alias) derived from the
 * message's business time, creating missing indices/aliases on demand through the
 * configured {@link ElasticsearchIndexManager}. Documents are buffered per target
 * index and flushed either when a buffer reaches {@code batchSize} or periodically
 * every {@code flushInterval} milliseconds by a dedicated scheduler thread.
 *
 * <p>Thread-safety: {@link #invoke} runs on the Flink task thread while periodic
 * flushes run on the scheduler thread, so every read/write of a per-index buffer
 * list is synchronized on that list.
 */
public class TimePartitionedElasticsearchSink extends RichSinkFunction<BusinessTimeExtractor.TimePartitionedMessage> {
    private static final Logger LOG = LoggerFactory.getLogger(TimePartitionedElasticsearchSink.class);
    private static final long serialVersionUID = 1L;

    // Immutable configuration captured at construction time (serialized with the job graph).
    private final int batchSize;
    private final long flushInterval;
    private final Map<String, Object> esConfig;
    private final ElasticsearchIndexManager indexManager;
    private final boolean useAlias;         // write through the monthly alias instead of the concrete index
    private final String documentIdField;   // dotted path of the field used as document _id; null = ES auto-id

    // Runtime state created in open(); transient because Flink serializes the sink instance.
    private transient RestHighLevelClient client;
    private transient ObjectMapper objectMapper;
    private transient Map<String, List<BusinessTimeExtractor.TimePartitionedMessage>> batchBuffers;
    private transient ScheduledExecutorService scheduler;
    // volatile: written by the flush thread in flushAll(), read by the task thread in shouldFlush().
    private transient volatile long lastFlushTime;
    private transient AtomicLong totalDocumentsWritten;
    private transient DateTimeFormatter timestampFormatter;

    /**
     * @param batchSize       number of buffered documents per index that triggers a flush
     * @param flushInterval   periodic flush interval in milliseconds
     * @param esConfig        connection settings ("hosts" is required; optional "scheme",
     *                        "username"/"password", and timeout keys)
     * @param indexManager    creates/verifies monthly indices and aliases
     * @param useAlias        if true, write through the alias; otherwise the concrete index
     * @param documentIdField dotted field path for the document _id, or null for auto-ids
     */
    public TimePartitionedElasticsearchSink(int batchSize, long flushInterval, 
                                          Map<String, Object> esConfig,
                                          ElasticsearchIndexManager indexManager,
                                          boolean useAlias, String documentIdField) {
        this.batchSize = batchSize;
        this.flushInterval = flushInterval;
        this.esConfig = esConfig;
        this.indexManager = indexManager;
        this.useAlias = useAlias;
        this.documentIdField = documentIdField;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);

        LOG.info("Opening TimePartitionedElasticsearchSink with batch size: {}, flush interval: {}ms", 
                batchSize, flushInterval);

        // Create the REST client before anything that may enqueue writes.
        initializeElasticsearchClient();

        objectMapper = new ObjectMapper();
        objectMapper.registerModule(new JavaTimeModule());
        timestampFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

        batchBuffers = new ConcurrentHashMap<>();

        indexManager.initialize();

        totalDocumentsWritten = new AtomicLong(0);
        lastFlushTime = System.currentTimeMillis();

        // Periodic flush on a single named daemon thread: named for diagnostics,
        // daemon so a stray sink instance can never keep the JVM alive.
        int subtask = getRuntimeContext().getIndexOfThisSubtask();
        scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
            Thread t = new Thread(r, "es-sink-flush-" + subtask);
            t.setDaemon(true);
            return t;
        });
        scheduler.scheduleAtFixedRate(this::flushAll, flushInterval, flushInterval, TimeUnit.MILLISECONDS);

        LOG.info("TimePartitionedElasticsearchSink initialized for task {}", subtask);
    }

    @Override
    public void invoke(BusinessTimeExtractor.TimePartitionedMessage message, Context context) throws Exception {
        if (message == null || message.getOriginalMessage() == null) {
            LOG.warn("Received null message, skipping");
            return;
        }

        try {
            // Make sure the monthly index (and alias, if used) exists before buffering.
            TimePartitionInfo partitionInfo = message.getPartitionInfo();
            indexManager.ensureIndexAndAlias(partitionInfo);

            String targetIndex = useAlias ? partitionInfo.getAliasName() : partitionInfo.getActualIndexName();

            // The flush thread drains this list concurrently, so the append must be
            // synchronized on the list (the original code appended without the lock).
            List<BusinessTimeExtractor.TimePartitionedMessage> buffer =
                    batchBuffers.computeIfAbsent(targetIndex, k -> new ArrayList<>());
            synchronized (buffer) {
                buffer.add(message);
            }

            if (shouldFlush(targetIndex)) {
                flushIndex(targetIndex);
            }

        } catch (Exception e) {
            LOG.error("Error processing message: {}", message, e);
            throw e; // let Flink fail/restart the task rather than drop data silently
        }
    }

    @Override
    public void close() throws Exception {
        LOG.info("Closing TimePartitionedElasticsearchSink");

        // Stop the periodic flusher first so it cannot race with the final flush.
        if (scheduler != null) {
            scheduler.shutdown();
            if (!scheduler.awaitTermination(30, TimeUnit.SECONDS)) {
                scheduler.shutdownNow();
            }
        }

        // Drain whatever is still buffered. Null-guarded because open() may have
        // failed before this state was created.
        if (batchBuffers != null) {
            flushAll();
        }

        if (client != null) {
            client.close();
        }
        if (indexManager != null) {
            indexManager.close();
        }

        LOG.info("TimePartitionedElasticsearchSink closed, total documents written: {}", 
                totalDocumentsWritten != null ? totalDocumentsWritten.get() : 0L);

        super.close();
    }

    /**
     * Builds the {@link RestHighLevelClient} from {@code esConfig}.
     *
     * <p>Required key: "hosts" (comma-separated host[:port] list). Optional keys:
     * "scheme" (default "http"), "username"/"password" for basic auth, and
     * "connect.timeout" / "socket.timeout" / "connection.request.timeout" in ms.
     *
     * @throws IllegalArgumentException if the "hosts" key is missing or blank
     */
    private void initializeElasticsearchClient() {
        String hostsConfig = (String) esConfig.get("hosts");
        if (hostsConfig == null || hostsConfig.trim().isEmpty()) {
            throw new IllegalArgumentException("ES config key 'hosts' is required");
        }

        // Scheme applies to every host, so resolve it once outside the loop.
        String scheme = (String) esConfig.getOrDefault("scheme", "http");
        String[] hostArray = hostsConfig.split(",");
        HttpHost[] hosts = new HttpHost[hostArray.length];

        for (int i = 0; i < hostArray.length; i++) {
            String[] hostPort = hostArray[i].trim().split(":");
            String host = hostPort[0];
            int port = hostPort.length > 1 ? Integer.parseInt(hostPort[1]) : 9200; // ES default port
            hosts[i] = new HttpHost(host, port, scheme);
        }

        RestClientBuilder builder = RestClient.builder(hosts);

        // Basic authentication, only when both credentials are present.
        if (esConfig.containsKey("username") && esConfig.containsKey("password")) {
            String username = (String) esConfig.get("username");
            String password = (String) esConfig.get("password");

            final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
            credentialsProvider.setCredentials(AuthScope.ANY, 
                new UsernamePasswordCredentials(username, password));

            builder.setHttpClientConfigCallback(
                httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
        }

        // Connection/request timeouts with conservative defaults.
        builder.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder
            .setConnectTimeout((Integer) esConfig.getOrDefault("connect.timeout", 5000))
            .setSocketTimeout((Integer) esConfig.getOrDefault("socket.timeout", 60000))
            .setConnectionRequestTimeout((Integer) esConfig.getOrDefault("connection.request.timeout", 5000)));

        client = new RestHighLevelClient(builder);
    }

    /**
     * Returns true when the given index's buffer has reached {@code batchSize} or
     * the global flush interval has elapsed since the last periodic flush.
     */
    private boolean shouldFlush(String indexName) {
        List<BusinessTimeExtractor.TimePartitionedMessage> buffer = batchBuffers.get(indexName);
        if (buffer == null) {
            return false;
        }

        int size;
        synchronized (buffer) {
            size = buffer.size();
        }
        return size >= batchSize
                || (System.currentTimeMillis() - lastFlushTime) >= flushInterval;
    }

    /**
     * Drains one index's buffer and bulk-writes it. On failure the drained messages
     * are pushed back to the front of the buffer so a later flush retries them
     * first, then a RuntimeException is thrown (tolerated per-index by
     * {@link #flushAll}, fatal when reached from {@link #invoke}).
     */
    private void flushIndex(String indexName) {
        List<BusinessTimeExtractor.TimePartitionedMessage> buffer = batchBuffers.get(indexName);
        if (buffer == null) {
            return;
        }

        List<BusinessTimeExtractor.TimePartitionedMessage> toFlush;
        synchronized (buffer) {
            // Emptiness must be checked under the lock; another thread may have
            // drained the buffer between lookup and here.
            if (buffer.isEmpty()) {
                return;
            }
            toFlush = new ArrayList<>(buffer);
            buffer.clear();
        }

        try {
            executeBulkRequest(indexName, toFlush);
            totalDocumentsWritten.addAndGet(toFlush.size());
            LOG.debug("Flushed {} documents to index: {}", toFlush.size(), indexName);
        } catch (Exception e) {
            LOG.error("Error flushing {} documents to index: {}", toFlush.size(), indexName, e);
            // Re-queue at the head so retried messages keep their relative order.
            synchronized (buffer) {
                buffer.addAll(0, toFlush);
            }
            throw new RuntimeException("Failed to flush documents to ES", e);
        }
    }

    /** Flushes every buffered index; this is also the periodic scheduler task body. */
    private void flushAll() {
        for (String indexName : batchBuffers.keySet()) {
            try {
                flushIndex(indexName);
            } catch (Exception e) {
                // Keep going: one failing index must not starve the others, and an
                // escaping exception would cancel the scheduled task permanently.
                LOG.error("Error flushing index: {}", indexName, e);
            }
        }
        lastFlushTime = System.currentTimeMillis();
    }

    /**
     * Sends a single bulk request containing all given messages.
     *
     * <p>NOTE(review): item-level bulk failures are only logged, not retried — the
     * failed documents are lost while the batch as a whole counts as flushed.
     * Confirm this best-effort behavior is intended.
     *
     * @throws IOException if the bulk call itself fails (triggers re-buffering upstream)
     */
    private void executeBulkRequest(String indexName, List<BusinessTimeExtractor.TimePartitionedMessage> messages) throws IOException {
        BulkRequest bulkRequest = new BulkRequest();

        for (BusinessTimeExtractor.TimePartitionedMessage message : messages) {
            try {
                IndexRequest indexRequest = createIndexRequest(indexName, message);
                if (indexRequest != null) {
                    bulkRequest.add(indexRequest);
                }
            } catch (Exception e) {
                // A single malformed message must not block the whole batch.
                LOG.error("Error creating index request for message: {}", message, e);
            }
        }

        if (bulkRequest.numberOfActions() == 0) {
            LOG.warn("No valid documents to index for: {}", indexName);
            return;
        }

        long startTime = System.currentTimeMillis();
        BulkResponse bulkResponse = client.bulk(bulkRequest, RequestOptions.DEFAULT);
        long endTime = System.currentTimeMillis();

        if (bulkResponse.hasFailures()) {
            LOG.error("Bulk request has failures for index {}: {}", indexName, bulkResponse.buildFailureMessage());
        } else {
            LOG.debug("Successfully indexed {} documents to {} in {} ms", 
                    bulkRequest.numberOfActions(), indexName, endTime - startTime);
        }
    }

    /**
     * Builds the IndexRequest for one message: parses the original JSON once,
     * extracts the optional document id, attaches partition metadata, and
     * serializes. (The previous implementation parsed the message JSON twice —
     * once for the body and again for the id.)
     *
     * @return the request, never null (parse failures propagate as exceptions)
     * @throws Exception if the original message cannot be parsed or re-serialized
     */
    @SuppressWarnings("unchecked")
    private IndexRequest createIndexRequest(String indexName, BusinessTimeExtractor.TimePartitionedMessage message) throws Exception {
        Map<String, Object> messageMap;
        try {
            messageMap = objectMapper.readValue(message.getOriginalMessage(), Map.class);
        } catch (Exception e) {
            LOG.error("Error processing message: {}", message.getOriginalMessage(), e);
            throw e;
        }

        // Resolve the id against the original payload, before enrichment.
        String documentId = extractDocumentId(messageMap);

        // Attach partition metadata so every document records how it was routed.
        messageMap.put("_partition_info", Map.of(
            "business_time", message.getBusinessTime().format(timestampFormatter),
            "partition_month", message.getPartitionInfo().getMonthString(),
            "index_name", message.getPartitionInfo().getActualIndexName(),
            "alias_name", message.getPartitionInfo().getAliasName(),
            "ingestion_time", LocalDateTime.now().format(timestampFormatter)
        ));

        IndexRequest indexRequest = new IndexRequest(indexName);
        if (documentId != null) {
            indexRequest.id(documentId);
        }
        indexRequest.source(objectMapper.writeValueAsString(messageMap), XContentType.JSON);
        return indexRequest;
    }

    /**
     * Resolves {@code documentIdField} (a dotted path) against the already-parsed
     * message map.
     *
     * @return the id as a String, or null when no id field is configured or the
     *         path does not resolve to a value
     */
    private String extractDocumentId(Map<String, Object> messageMap) {
        if (documentIdField == null || documentIdField.trim().isEmpty()) {
            return null;
        }

        Object idValue = getNestedValue(messageMap, documentIdField);
        return idValue != null ? idValue.toString() : null;
    }

    /**
     * Walks a dotted path ("a.b.c") through nested maps.
     *
     * @return the resolved value, or null as soon as a segment is missing or the
     *         current value is not a map
     */
    private Object getNestedValue(Map<String, Object> map, String fieldPath) {
        Object current = map;
        for (String segment : fieldPath.split("\\.")) {
            if (!(current instanceof Map)) {
                return null;
            }
            current = ((Map<?, ?>) current).get(segment);
        }
        return current;
    }

    /** Logs written/buffered document counters plus index/alias totals from the manager. */
    public void logStatistics() {
        // Sum buffer sizes under each list's lock; the flush thread mutates them.
        int totalBufferedMessages = 0;
        for (List<BusinessTimeExtractor.TimePartitionedMessage> buffer : batchBuffers.values()) {
            synchronized (buffer) {
                totalBufferedMessages += buffer.size();
            }
        }

        LOG.info("ES Sink Statistics - Written: {}, Buffered: {}, Indices: {}, Aliases: {}", 
                totalDocumentsWritten.get(), 
                totalBufferedMessages,
                indexManager.getCreatedIndicesCount(),
                indexManager.getCreatedAliasesCount());
    }

    /** Fluent builder for {@link TimePartitionedElasticsearchSink}. */
    public static class Builder {
        private int batchSize = 100;        // documents per index before a size-triggered flush
        private long flushInterval = 5000;  // periodic flush interval in ms
        private Map<String, Object> esConfig;
        private ElasticsearchIndexManager indexManager;
        private boolean useAlias = true;
        private String documentIdField;

        public Builder batchSize(int batchSize) {
            this.batchSize = batchSize;
            return this;
        }

        public Builder flushInterval(long flushInterval) {
            this.flushInterval = flushInterval;
            return this;
        }

        public Builder esConfig(Map<String, Object> esConfig) {
            this.esConfig = esConfig;
            return this;
        }

        public Builder indexManager(ElasticsearchIndexManager indexManager) {
            this.indexManager = indexManager;
            return this;
        }

        public Builder useAlias(boolean useAlias) {
            this.useAlias = useAlias;
            return this;
        }

        public Builder documentIdField(String documentIdField) {
            this.documentIdField = documentIdField;
            return this;
        }

        /**
         * @return a configured sink
         * @throws IllegalArgumentException if required settings are missing or invalid
         */
        public TimePartitionedElasticsearchSink build() {
            if (esConfig == null) {
                throw new IllegalArgumentException("ES config is required");
            }
            if (indexManager == null) {
                throw new IllegalArgumentException("Index manager is required");
            }
            // Non-positive values would make shouldFlush()/scheduleAtFixedRate misbehave.
            if (batchSize <= 0) {
                throw new IllegalArgumentException("batchSize must be positive, got: " + batchSize);
            }
            if (flushInterval <= 0) {
                throw new IllegalArgumentException("flushInterval must be positive, got: " + flushInterval);
            }
            return new TimePartitionedElasticsearchSink(batchSize, flushInterval, esConfig, 
                                                      indexManager, useAlias, documentIdField);
        }
    }

    /** @return a new {@link Builder} with default settings */
    public static Builder builder() {
        return new Builder();
    }
}