package com.zyx.flinkdemo.stream.sink;

import com.alibaba.fastjson.JSONObject;
import com.zyx.flinkdemo.pojo.User;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ActionRequestFailureHandler;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.flink.util.ExceptionUtils;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.impl.nio.reactor.IOReactorConfig;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author Yaxi.Zhang
 * @since 2021/7/2 14:54
 * desc: Flink ElasticsearchSink example
 * reference: https://ci.apache.org/projects/flink/flink-docs-release-1.13/zh/docs/connectors/datastream/elasticsearch/
 */
@Slf4j
public class ElasticSearchSinkDemo {

    /**
     * Reads lines from a socket source and writes each line into
     * Elasticsearch as a {@code {"data": <line>}} document.
     *
     * @param args optional overrides, all defaulting to the original demo values:
     *             {@code [0]} source host (default "localhost"),
     *             {@code [1]} source port (default 7777),
     *             {@code [2]} ES host (default "localhost"),
     *             {@code [3]} ES port (default 9200)
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // Optional CLI overrides; defaults preserve the original hard-coded behavior.
        final String sourceHost = args.length > 0 ? args[0] : "localhost";
        final int sourcePort = args.length > 1 ? Integer.parseInt(args[1]) : 7777;
        final String esHost = args.length > 2 ? args[2] : "localhost";
        final int esPort = args.length > 3 ? Integer.parseInt(args[3]) : 9200;

        // Create the streaming environment and attach a socket text source.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> input = env
                .socketTextStream(sourceHost, sourcePort);

        // ES addresses; add more HttpHost entries for a clustered deployment.
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost(esHost, esPort, "http"));

        // Use an ElasticsearchSink.Builder to create the ElasticsearchSink.
        ElasticsearchSink.Builder<String> esSinkBuilder = new ElasticsearchSink.Builder<>(
                httpHosts,
                new ElasticsearchSinkFunction<String>() {
                    @Override
                    public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
                        indexer.add(createIndexRequest(element));
                    }
                }
        );

        // Bulk flush size: 1 means every record is written immediately; values > 1
        // buffer that many records before issuing one bulk request.
        esSinkBuilder.setBulkFlushMaxActions(1);

        // Customize the underlying Elasticsearch REST client.
        // Other available knobs (see the RestClientBuilder docs): default headers
        // via setDefaultHeaders(...), a URL prefix via setPathPrefix(...), and async
        // I/O tuning (thread count, connect/socket timeouts) via
        // setHttpClientConfigCallback(...) with a custom IOReactorConfig.
        // Reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html
        esSinkBuilder.setRestClientFactory(
                restClientBuilder -> restClientBuilder
                        // Maximum retry timeout in milliseconds.
                        .setMaxRetryTimeoutMillis(1000)
        );

        // Failure strategy: retry queue-full rejections, drop malformed documents,
        // and fail the sink for anything else.
        esSinkBuilder.setFailureHandler((action, failure, restStatusCode, indexer) -> {
            if (ExceptionUtils.findThrowable(failure, EsRejectedExecutionException.class).isPresent()) {
                // Bulk queue was full: re-queue the request so it is retried.
                indexer.add(action);
            } else if (ExceptionUtils.findThrowable(failure, ElasticsearchParseException.class).isPresent()) {
                // Malformed document: log and drop it to keep the sink alive.
                log.error("++++++++++++++++++++插入ES的数据格式有误");
            } else {
                // For all other failures, rethrow; users may instead throw
                // custom exceptions here.
                throw failure;
            }
        });

        // Attach the ES sink to the stream.
        input.addSink(esSinkBuilder.build());

        // Launch the job.
        env.execute("flink es demo");
    }

    /**
     * Wraps one raw element into an {@link IndexRequest} targeting
     * {@code my-index/my-type}, storing the element under the "data" field.
     *
     * @param element the raw record read from the socket source
     * @return an index request ready to be handed to the {@link RequestIndexer}
     */
    private static IndexRequest createIndexRequest(String element) {
        Map<String, String> json = new HashMap<>(16);
        json.put("data", element);

        return Requests.indexRequest()
                .index("my-index")
                .type("my-type")
                .source(json);
    }
}
