package com.atguigu.flink.demo04;

import lombok.SneakyThrows;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SocketTextStreamFunction;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Demo job: reads comma-separated user records from a socket, parses them into
 * {@code UserBean} POJOs, and bulk-writes them to an Elasticsearch 6 cluster
 * via Flink's Elasticsearch sink connector.
 *
 * @author admin
 * @date 2021/8/10
 */
public class ElasticsearchSink {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        /*
         * Read newline-delimited text from a socket.
         * Arguments: host, port, line delimiter, max retry attempts.
         */
        SocketTextStreamFunction socketText = new SocketTextStreamFunction("hadoop102", 9999, "\n", 3);
        DataStreamSource<String> source = env.addSource(socketText);

        // Parse each comma-separated line into a UserBean POJO.
        SingleOutputStreamOperator<UserBean> map = source.map(e -> new UserBean(e.split(",")))
                .returns(Types.POJO(UserBean.class));

        // Elasticsearch cluster endpoints.
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost("hadoop102", 9200, "http"));
        httpHosts.add(new HttpHost("hadoop103", 9200, "http"));
        httpHosts.add(new HttpHost("hadoop104", 9200, "http"));

        // Build the Elasticsearch sink. This class shadows Flink's
        // ElasticsearchSink, hence the fully qualified names below.
        // Was a raw type; now parameterized with <UserBean> for type safety.
        org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink.Builder<UserBean> builder =
                new org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink.Builder<>(
                        httpHosts,
                        new ElasticsearchSinkFunction<UserBean>() {

                            /**
                             * Builds an index request that writes the bean's fields
                             * into index {@code user_index}, type {@code _doc}.
                             *
                             * @throws IllegalAccessException if a bean field cannot be read reflectively
                             */
                            public IndexRequest createIndexRequest(UserBean element) throws IllegalAccessException {
                                return Requests.indexRequest()
                                        .index("user_index")
                                        .type("_doc")
                                        .source(objectToMap(element));
                            }

                            @SneakyThrows
                            @Override
                            public void process(UserBean element, RuntimeContext ctx, RequestIndexer indexer) {
                                indexer.add(createIndexRequest(element));
                            }
                        }
                );
        // Bulk-flush policy: flush buffered requests at least every 1000 ms.
        builder.setBulkFlushInterval(1000);

        org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink<UserBean> sink = builder.build();

        map.addSink(sink);

        env.execute();
    }

    /**
     * Converts an object's declared instance fields into a (field name -> value)
     * map via reflection, for use as an Elasticsearch document source.
     *
     * @param obj the bean to convert; must not be null
     * @return a map from field name to field value (values may be null)
     * @throws IllegalAccessException if a field cannot be read reflectively
     */
    public static Map<String, Object> objectToMap(Object obj) throws IllegalAccessException {
        Map<String, Object> map = new HashMap<>();
        Class<?> clazz = obj.getClass();
        for (Field field : clazz.getDeclaredFields()) {
            // Skip static and compiler-generated fields (e.g. this$0, coverage
            // probes): they are not bean data and would pollute the document.
            if (Modifier.isStatic(field.getModifiers()) || field.isSynthetic()) {
                continue;
            }
            field.setAccessible(true);
            map.put(field.getName(), field.get(obj));
        }
        return map;
    }
}
