package com.flink.hbase.kafka2elasticsearch;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Elasticsearch sink that bulk-writes {@link UserDocument} records to an index.
 *
 * <p>Documents are buffered and flushed either when the buffer reaches {@code batchSize}
 * or when {@code flushInterval} milliseconds have elapsed since the previous flush.
 * Flushes are only triggered from {@link #invoke} and {@link #close}; an idle stream
 * will not flush until the next record arrives or the sink shuts down.
 *
 * <p>Thread-safety: Flink invokes {@code invoke()}/{@code close()} from a single task
 * thread, so the buffer needs no synchronization.
 */
public class ElasticsearchSink extends RichSinkFunction<UserDocument> {
    private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchSink.class);
    private static final long serialVersionUID = 1L;

    /** Base index name; a date suffix may be appended, see {@link #getIndexName}. */
    private final String indexName;
    /** Flush once this many documents are buffered. */
    private final int batchSize;
    /** Flush once this many milliseconds have passed since the last flush. */
    private final long flushInterval;
    /** Connection settings; must be a serializable Map implementation (e.g. HashMap). */
    private final Map<String, Object> esConfig;

    // Runtime state — rebuilt per task instance in open(), hence transient.
    private transient RestHighLevelClient client;
    private transient ObjectMapper objectMapper;
    private transient List<UserDocument> batch;
    private transient long lastFlushTime;
    private transient DateTimeFormatter dateFormatter;

    /**
     * @param indexName     base Elasticsearch index name (required)
     * @param batchSize     number of documents per bulk request
     * @param flushInterval max milliseconds between flushes
     * @param esConfig      connection settings; "hosts" is required, "scheme",
     *                      "username"/"password", timeouts and "date.partitioned" optional
     */
    public ElasticsearchSink(String indexName, int batchSize, long flushInterval, Map<String, Object> esConfig) {
        this.indexName = indexName;
        this.batchSize = batchSize;
        this.flushInterval = flushInterval;
        this.esConfig = esConfig;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);

        LOG.info("Opening Elasticsearch sink for index: {}", indexName);

        // Build the REST client from esConfig.
        initializeElasticsearchClient();

        // JavaTimeModule is needed so java.time fields (e.g. createTime) serialize.
        objectMapper = new ObjectMapper();
        objectMapper.registerModule(new JavaTimeModule());

        // Presize the buffer to the flush threshold to avoid re-allocation.
        batch = new ArrayList<>(batchSize);
        lastFlushTime = System.currentTimeMillis();
        dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd");

        LOG.info("Elasticsearch sink initialized with batch size: {}, flush interval: {}ms",
                batchSize, flushInterval);
    }

    @Override
    public void invoke(UserDocument document, Context context) throws Exception {
        // Drop null/invalid records rather than failing the job.
        if (document == null || !document.isValid()) {
            LOG.warn("Invalid document, skipping: {}", document);
            return;
        }

        batch.add(document);

        // Size- or time-triggered flush. flush() is protected, so this dispatches
        // virtually to AsyncElasticsearchSink when that subclass is in use.
        if (shouldFlush()) {
            flush();
        }
    }

    @Override
    public void close() throws Exception {
        LOG.info("Closing Elasticsearch sink");

        // Ensure the client and the parent lifecycle are closed even if the
        // final flush throws.
        try {
            if (batch != null && !batch.isEmpty()) {
                flush();
            }
        } finally {
            try {
                if (client != null) {
                    client.close();
                }
            } finally {
                super.close();
            }
        }
    }

    /**
     * Builds the {@link RestHighLevelClient} from {@link #esConfig}.
     *
     * <p>Recognized keys: "hosts" (required, comma-separated host[:port] list),
     * "scheme" (default "http"), "username"/"password" (optional basic auth),
     * "connect.timeout", "socket.timeout", "connection.request.timeout" (ms).
     *
     * @throws IllegalStateException if "hosts" is missing or blank
     */
    private void initializeElasticsearchClient() {
        String hostsConfig = (String) esConfig.get("hosts");
        if (hostsConfig == null || hostsConfig.trim().isEmpty()) {
            throw new IllegalStateException("esConfig is missing required key 'hosts'");
        }

        // Scheme is the same for every host — read it once outside the loop.
        String scheme = (String) esConfig.getOrDefault("scheme", "http");

        String[] hostArray = hostsConfig.split(",");
        HttpHost[] hosts = new HttpHost[hostArray.length];
        for (int i = 0; i < hostArray.length; i++) {
            String[] hostPort = hostArray[i].trim().split(":");
            String host = hostPort[0];
            // Default ES HTTP port when none is given.
            int port = hostPort.length > 1 ? Integer.parseInt(hostPort[1]) : 9200;
            hosts[i] = new HttpHost(host, port, scheme);
        }

        RestClientBuilder builder = RestClient.builder(hosts);

        // Optional basic authentication.
        if (esConfig.containsKey("username") && esConfig.containsKey("password")) {
            String username = (String) esConfig.get("username");
            String password = (String) esConfig.get("password");

            final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
            credentialsProvider.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials(username, password));

            builder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
                @Override
                public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
                    return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
                }
            });
        }

        // Timeouts: accept any Number (Integer/Long) from the config map instead of
        // a hard (Integer) cast that would throw ClassCastException on a Long.
        builder.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder
            .setConnectTimeout(intConfig("connect.timeout", 5000))
            .setSocketTimeout(intConfig("socket.timeout", 60000))
            .setConnectionRequestTimeout(intConfig("connection.request.timeout", 5000)));

        client = new RestHighLevelClient(builder);
    }

    /** Reads an integer setting from {@link #esConfig}, tolerating any Number type. */
    private int intConfig(String key, int defaultValue) {
        return ((Number) esConfig.getOrDefault(key, defaultValue)).intValue();
    }

    /** @return true when the batch is full or the flush interval has elapsed. */
    private boolean shouldFlush() {
        return batch.size() >= batchSize ||
               (System.currentTimeMillis() - lastFlushTime) >= flushInterval;
    }

    /**
     * Synchronously flushes the buffered documents as one bulk request.
     *
     * <p>Protected (not private) so that {@link AsyncElasticsearchSink} can legally
     * override it and so calls from {@link #invoke}/{@link #close} dispatch to the
     * subclass. On transport failure the batch is left intact, so the same documents
     * are retried on the next flush (at-least-once; duplicates collapse on document id).
     *
     * @throws IOException if the bulk request cannot be executed
     */
    protected void flush() throws IOException {
        if (batch.isEmpty()) {
            return;
        }

        long startTime = System.currentTimeMillis();

        BulkRequest bulkRequest = buildBulkRequest();
        if (bulkRequest.numberOfActions() == 0) {
            // Every document failed serialization — drop them and reset the timer.
            LOG.warn("No valid documents to index");
            batch.clear();
            lastFlushTime = System.currentTimeMillis();
            return;
        }

        try {
            BulkResponse bulkResponse = client.bulk(bulkRequest, RequestOptions.DEFAULT);

            long endTime = System.currentTimeMillis();
            LOG.info("Flushed {} documents to ES in {} ms", batch.size(), endTime - startTime);

            // Partial failures are logged; callers may extend this to retry per item.
            if (bulkResponse.hasFailures()) {
                LOG.error("Bulk request has failures: {}", bulkResponse.buildFailureMessage());
            }

        } catch (IOException e) {
            // Rethrow without clearing the batch so the documents are retried.
            LOG.error("Error executing bulk request", e);
            throw e;
        }

        batch.clear();
        lastFlushTime = System.currentTimeMillis();
    }

    /**
     * Builds a {@link BulkRequest} from the current batch. Documents that fail
     * serialization are logged and skipped. Shared by the sync and async flush paths.
     */
    protected BulkRequest buildBulkRequest() {
        BulkRequest bulkRequest = new BulkRequest();
        for (UserDocument document : batch) {
            try {
                bulkRequest.add(createIndexRequest(document));
            } catch (Exception e) {
                LOG.error("Error creating index request for document: {}", document, e);
            }
        }
        return bulkRequest;
    }

    /**
     * Creates an index request for one document, JSON-serialized via Jackson and
     * keyed by the document id (making writes idempotent).
     */
    protected IndexRequest createIndexRequest(UserDocument document) throws Exception {
        String actualIndexName = getIndexName(document);
        String documentId = document.getDocumentId();

        String json = objectMapper.writeValueAsString(document);

        return new IndexRequest(actualIndexName)
            .id(documentId)
            .source(json, XContentType.JSON);
    }

    /**
     * Resolves the target index name. When "date.partitioned" is true the document's
     * create time (or now, if absent) is appended as "-yyyy-MM-dd".
     */
    private String getIndexName(UserDocument document) {
        Boolean datePartitioned = (Boolean) esConfig.getOrDefault("date.partitioned", false);

        if (datePartitioned) {
            LocalDateTime dateTime = document.getCreateTime() != null ?
                document.getCreateTime() : LocalDateTime.now();
            return indexName + "-" + dateTime.format(dateFormatter);
        }
        return indexName;
    }

    /**
     * Non-blocking variant: bulk requests are submitted via {@code bulkAsync} and the
     * batch is cleared immediately.
     *
     * <p>NOTE(review): this is fire-and-forget — if the async request fails, the
     * documents are already cleared and are lost (no at-least-once guarantee).
     */
    public static class AsyncElasticsearchSink extends ElasticsearchSink {

        public AsyncElasticsearchSink(String indexName, int batchSize, long flushInterval, Map<String, Object> esConfig) {
            super(indexName, batchSize, flushInterval, esConfig);
        }

        @Override
        protected void flush() throws IOException {
            if (batch.isEmpty()) {
                return;
            }

            final long startTime = System.currentTimeMillis();

            BulkRequest bulkRequest = buildBulkRequest();
            if (bulkRequest.numberOfActions() == 0) {
                LOG.warn("No valid documents to index");
                batch.clear();
                lastFlushTime = System.currentTimeMillis();
                return;
            }

            // Capture the count now: the batch is cleared below while the request is
            // still in flight, so the callback must not read batch.size().
            final int documentCount = batch.size();

            client.bulkAsync(bulkRequest, RequestOptions.DEFAULT, new ActionListener<BulkResponse>() {
                @Override
                public void onResponse(BulkResponse bulkResponse) {
                    LOG.info("Async flushed {} documents to ES in {} ms",
                            documentCount, System.currentTimeMillis() - startTime);

                    if (bulkResponse.hasFailures()) {
                        LOG.error("Bulk request has failures: {}", bulkResponse.buildFailureMessage());
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    // Documents were already cleared from the batch; only log.
                    LOG.error("Async bulk request failed", e);
                }
            });

            batch.clear();
            lastFlushTime = System.currentTimeMillis();
        }
    }

    /**
     * Fluent builder for {@link ElasticsearchSink} / {@link AsyncElasticsearchSink}.
     * Defaults: batchSize=100, flushInterval=5000ms.
     */
    public static class Builder {
        private String indexName;
        private int batchSize = 100;
        private long flushInterval = 5000;
        private Map<String, Object> esConfig;

        public Builder indexName(String indexName) {
            this.indexName = indexName;
            return this;
        }

        public Builder batchSize(int batchSize) {
            this.batchSize = batchSize;
            return this;
        }

        public Builder flushInterval(long flushInterval) {
            this.flushInterval = flushInterval;
            return this;
        }

        public Builder config(Map<String, Object> esConfig) {
            this.esConfig = esConfig;
            return this;
        }

        /** Validates the required fields shared by both build variants. */
        private void validate() {
            if (indexName == null) {
                throw new IllegalArgumentException("Index name is required");
            }
            if (esConfig == null) {
                throw new IllegalArgumentException("ES config is required");
            }
        }

        public ElasticsearchSink build() {
            validate();
            return new ElasticsearchSink(indexName, batchSize, flushInterval, esConfig);
        }

        public AsyncElasticsearchSink buildAsync() {
            validate();
            return new AsyncElasticsearchSink(indexName, batchSize, flushInterval, esConfig);
        }
    }

    /** Factory method for the builder. */
    public static Builder builder() {
        return new Builder();
    }
}