package com.atguigu.champter5.Flink05.sink;

import com.atguigu.beans.WaterSensor;
import com.atguigu.source.RandomWaterSenson;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;


/**
 * Flink streaming job that reads randomly generated {@link WaterSensor} records
 * from {@link RandomWaterSenson} and writes them into an Elasticsearch 6 index
 * named {@code sensor} (type {@code _doc}).
 *
 * <p>Each document has the shape {@code {<sensorId>: <vc>}}, i.e. the sensor id
 * is used as the field name and the water level as the value.
 */
public class Flink04_Sink_Elasticsearch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Unbounded source of random sensor readings (project-local test source).
        DataStreamSource<WaterSensor> ds = env.addSource(new RandomWaterSenson());

        // Elasticsearch REST endpoint(s). Add more hosts here for a multi-node cluster.
        ArrayList<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost("hadoop102", 9200, "http"));

        ElasticsearchSink.Builder<WaterSensor> esBuilder = new ElasticsearchSink.Builder<>(
                httpHosts,
                new ElasticsearchSinkFunction<WaterSensor>() {
                    @Override
                    public void process(WaterSensor waterSensor,
                                        RuntimeContext runtimeContext,
                                        RequestIndexer requestIndexer) {
                        // Document body: {sensorId: vc}. NOTE(review): an explicit
                        // {"id":..., "ts":..., "vc":...} schema would usually be
                        // preferable, but the original field layout is kept here.
                        HashMap<String, Integer> data = new HashMap<>();
                        data.put(waterSensor.getId(), waterSensor.getVc());

                        IndexRequest indexRequest = Requests
                                .indexRequest()
                                .index("sensor")
                                .type("_doc")
                                .source(data);
                        requestIndexer.add(indexRequest);
                    }
                });

        // The connector buffers bulk requests (default: 1000 actions) before
        // flushing. With a slow source nothing would ever reach Elasticsearch,
        // so flush after every element for this demo.
        esBuilder.setBulkFlushMaxActions(1);

        ds.addSink(esBuilder.build());

        // Propagate failures instead of swallowing them with printStackTrace():
        // a failed job should terminate with a non-zero exit status.
        env.execute("Flink04_Sink_Elasticsearch");
    }
}
