package com.bingxu.flink.chapter02.sink;

import com.bingxu.flink.bean.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisConfigBase;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author :BingXu
 * @description :Flink job that reads "id,ts,vc" CSV lines from a local socket,
 *               maps them to {@link com.bingxu.flink.bean.WaterSensor} records,
 *               and sinks each record as a JSON document into an Elasticsearch 6 cluster.
 * @date :2021/8/10 11:48
 * @modifier :
 */

public class Trans_ESSink {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: raw text lines of the form "id,ts,vc" from a local socket.
        DataStreamSource<String> streamDS = env.socketTextStream("localhost", 9999);

        // Parse each CSV line into a WaterSensor(id, ts, vc).
        SingleOutputStreamOperator<WaterSensor> waterSensorDS = streamDS.map(new MapFunction<String, WaterSensor>() {
            @Override
            public WaterSensor map(String s) throws Exception {
                String[] fields = s.split(",");
                // NOTE(review): a malformed line (fewer than 3 fields, or non-numeric
                // ts/vc) throws and fails the task — acceptable for a hand-fed demo.
                return new WaterSensor(fields[0], Long.parseLong(fields[1]), Integer.parseInt(fields[2]));
            }
        });

        // Elasticsearch 6.x cluster nodes the connector should round-robin over.
        List<HttpHost> httpHosts = Arrays.asList(
                new HttpHost("hadoop102", 9200),
                new HttpHost("hadoop103", 9200),
                new HttpHost("hadoop104", 9200));

        ElasticsearchSink.Builder<WaterSensor> esSinkBuilder = new ElasticsearchSink.Builder<>(
                httpHosts,
                new ElasticsearchSinkFunction<WaterSensor>() {
                    /**
                     * Builds one IndexRequest per incoming element and queues it
                     * on the indexer for the bulk processor to flush.
                     *
                     * @param waterSensor    the element to index
                     * @param runtimeContext Flink runtime context (unused here)
                     * @param requestIndexer collector for pending ES requests
                     */
                    @Override
                    public void process(WaterSensor waterSensor, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
                        // Document body: a single "data" field holding the sensor's toString().
                        // Declared as the Map interface rather than the concrete HashMap.
                        Map<String, String> dataMap = new HashMap<>();
                        dataMap.put("data", waterSensor.toString());

                        IndexRequest indexRequest = Requests.indexRequest()
                                .index("flink210323")
                                .type("_doc")
                                .source(dataMap);

                        requestIndexer.add(indexRequest);
                    }
                });

        // Flush after every single action so documents become visible immediately
        // while testing; raise this for real workloads to get proper bulk batching.
        esSinkBuilder.setBulkFlushMaxActions(1);
        ElasticsearchSink<WaterSensor> esSink = esSinkBuilder.build();

        waterSensorDS.addSink(esSink);
        env.execute();
    }
}
