package com.flink.sink.elasticsearch;

import com.flink.entity.User;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Demo Flink streaming job that writes a fixed set of {@code User} records
 * to Elasticsearch 7.
 *
 * @author yanzhengwu
 * @create 2022-07-23 22:51
 */
public class SinkToElasticsearch {
    public static void main(String[] args) throws Exception {

        // Set up the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the demo elements in a deterministic order
        // (effectively single-threaded execution).
        env.setParallelism(1);

        DataStreamSource<User> stream = env.fromElements(
                new User("用户2", "家电", LocalDate.of(2022, 6, 2), 12, 10000L),
                new User("用户1", "零食", LocalDate.of(2022, 1, 1), 11, 10000L),
                new User("用户2", "洗发水", LocalDate.of(2022, 6, 2), 13, 10000L),
                new User("用户3", "椅子", LocalDate.of(2022, 6, 3), 14, 10000L),
                new User("用户3", "手机", LocalDate.of(2022, 6, 3), 15, 10000L),
                new User("用户1", "电脑", LocalDate.of(2022, 6, 2), 13, 10000L),
                new User("用户1", "手机", LocalDate.of(2022, 6, 16), 16, 10000L),
                new User("用户2", "洗发水", LocalDate.of(2022, 6, 2), 13, 60000L)
        );

        // Elasticsearch node to connect to.
        // NOTE(review): host/port are hard-coded demo values — externalize for real use.
        HttpHost httpHost = new HttpHost("192.168.43.10", 8989, "http");
        List<HttpHost> hostList = new ArrayList<>();
        hostList.add(httpHost);

        // Flink 1.14.x connector API. An explicit anonymous class is used (rather than a
        // lambda) so Flink can resolve the element type parameter at runtime.
        ElasticsearchSink.Builder<User> builder = new ElasticsearchSink.Builder<User>(hostList,
                new ElasticsearchSinkFunction<User>() {
                    @Override
                    public void open() throws Exception {
                        // No per-task resources to initialize.
                    }

                    @Override
                    public void close() throws Exception {
                        // No per-task resources to release.
                    }

                    @Override
                    public void process(User element, RuntimeContext ctx, RequestIndexer indexer) {
                        // Emit exactly one index request per incoming element.
                        indexer.add(createIndexRequest(element));
                    }
                });
        // Flush after every single element so nothing lingers in the bulk buffer during
        // this bounded demo job; raise (or drop) this for production throughput.
        builder.setBulkFlushMaxActions(1);

        stream.addSink(builder.build());

        env.execute();
    }

    /**
     * Converts a {@link User} into the Elasticsearch {@link IndexRequest} that stores it.
     *
     * @param user the record to index; only name and product are stored here
     * @return an index request targeting the {@code my-index} index
     */
    private static IndexRequest createIndexRequest(User user) {
        Map<String, Object> json = new HashMap<>();
        // Use fixed field names. The previous code did
        //   json.put(user.getName(), user.getProd())
        // which used the user's NAME as the field name — every distinct user created a new
        // field in the index mapping (mapping explosion) and the documents could not be
        // queried by a stable field.
        json.put("name", user.getName());
        json.put("prod", user.getProd());

        return Requests.indexRequest()
                // Target index name.
                .index("my-index")
                // NOTE(review): hashCode() is not a stable document id — distinct records can
                // collide and re-runs may silently overwrite documents. Prefer a real business
                // key once one is available on User.
                .id(String.valueOf(user.hashCode()))
                // The document body.
                .source(json);
    }
}

