package sink;

import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.junit.Test;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class Flink_sink_ES {

    /**
     * Converts each incoming CSV line into an Elasticsearch {@link IndexRequest}
     * and hands it to the {@link RequestIndexer}.
     *
     * <p>Expects at least three comma-separated fields per line; the first field
     * is used as the document key and the third as its value (presumably lines
     * like "id,timestamp,reading" — TODO confirm against input/sensorreading.txt).
     */
    public static class MyElastic implements ElasticsearchSinkFunction<String> {

        @Override
        public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
            String[] fields = element.split(",");
            // Guard against malformed or blank lines: without this check,
            // fields[2] below would throw ArrayIndexOutOfBoundsException
            // and fail the entire streaming job.
            if (fields.length < 3) {
                return;
            }

            Map<String, String> source = new HashMap<>();
            source.put(fields[0], fields[2]);

            IndexRequest request = Requests.indexRequest()
                    .index("sensor")
                    .type("_doc") // the ES6 connector still requires a mapping type
                    .source(source);

            indexer.add(request);
        }
    }

    /**
     * Reads sensor readings from a text file and sinks them into Elasticsearch.
     *
     * @throws Exception if the Flink job fails to execute
     */
    @Test
    public void test() throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);

        // Target Elasticsearch cluster (single node, default HTTP port).
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost("node", 9200));

        ElasticsearchSink.Builder<String> builder =
                new ElasticsearchSink.Builder<>(httpHosts, new MyElastic());
        // Flush after every single action so records appear in ES immediately;
        // useful for testing — raise this for throughput in production.
        builder.setBulkFlushMaxActions(1);

        ElasticsearchSink<String> elasticSink = builder.build();

        env.readTextFile("input/sensorreading.txt")
                .addSink(elasticSink);

        env.execute();
    }
}
