package com.atguigu.flink.chapter05.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.bean.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentType;

import java.util.Arrays;
import java.util.List;

/**
 * @Author lzc
 * @Date 2022/7/5 10:19
 */
/**
 * Flink streaming job that reads comma-separated sensor readings
 * ("id,timestamp,vc") from a socket, keeps a running sum of the water
 * level ("vc") per sensor id, and writes the running totals to an
 * Elasticsearch 6 cluster through the bulk-flushing Elasticsearch sink.
 */
public class Flink03_Sink_Es {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Pin the Flink web UI to a fixed port for local debugging.
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        // All ES nodes of the cluster; the client load-balances across them.
        List<HttpHost> hosts = Arrays.asList(
            new HttpHost("hadoop162", 9200),
            new HttpHost("hadoop163", 9200),
            new HttpHost("hadoop164", 9200)
        );

        ElasticsearchSink.Builder<WaterSensor> builder = new ElasticsearchSink.Builder<>(
            hosts,
            new ElasticsearchSinkFunction<WaterSensor>() {
                @Override
                public void process(WaterSensor element,
                                    RuntimeContext ctx,
                                    RequestIndexer indexer) {
                    // Serialize the POJO to JSON and index it into "sensor"/"_doc",
                    // keyed by sensor id so repeated writes update the same document.
                    String source = JSON.toJSONString(element);

                    IndexRequest request = Requests
                        .indexRequest("sensor")
                        .type("_doc")
                        .id(element.getId())
                        .source(source, XContentType.JSON);

                    indexer.add(request);
                }
            }
        );

        // Flush buffered requests every 2s, after 2 actions, or once 1 MB is
        // buffered — whichever comes first. Thresholds are deliberately small
        // so results appear in ES quickly during local testing.
        builder.setBulkFlushInterval(2000);
        builder.setBulkFlushMaxActions(2);
        builder.setBulkFlushMaxSizeMb(1);

        env
            //.readTextFile("input/sensor.txt")
            .socketTextStream("hadoop162", 9999)
            .map(new MapFunction<String, WaterSensor>() {
                @Override
                public WaterSensor map(String value) throws Exception {
                    // Expected record format: "<id>,<timestamp>,<vc>".
                    String[] data = value.split(",");
                    if (data.length < 3) {
                        // Fail with an informative message instead of an opaque
                        // ArrayIndexOutOfBoundsException on malformed input.
                        throw new IllegalArgumentException(
                            "Malformed sensor record, expected 'id,ts,vc': " + value);
                    }
                    // parseLong/parseInt over the boxed valueOf variants.
                    return new WaterSensor(data[0], Long.parseLong(data[1]), Integer.parseInt(data[2]));
                }
            })
            .keyBy(WaterSensor::getId)
            .sum("vc")
            .addSink(builder.build());

        try {
            env.execute();
        } catch (Exception e) {
            // Fail loudly (non-zero exit) instead of swallowing the failure
            // with printStackTrace() and exiting successfully.
            throw new RuntimeException("Flink job execution failed", e);
        }
    }
}
