package com.bigdata.es.flink2es;

import com.alibaba.fastjson.JSONObject;
import com.bigdata.es.conf.ConfigurationManager;
import com.bigdata.es.conf.KafkaProperties;
import com.bigdata.es.elasticsearch.ElasticSearchUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.http.HttpHost;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class FlinkSinkToES {

    private static final Logger logger = LogManager.getLogger(FlinkSinkToES.class);

    /**
     * Entry point: builds a Flink streaming job that reads raw JSON strings from
     * the configured Kafka topic, parses them, and bulk-indexes them into
     * Elasticsearch. The target index (and its mapping) is created up front if
     * it does not exist yet.
     */
    public static void main(String[] args) {
        // Streaming execution environment, fixed parallelism of 4.
        final StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();
        senv.setParallelism(4);

        // Make sure the index exists (with its mapping) before the job starts writing.
        createIndex();

        // Source: consume records from the configured Kafka topic.
        Properties props = KafkaProperties.getKafkaProperties();
        DataStream<String> stream = senv.addSource(new FlinkKafkaConsumer<>(
                KafkaProperties.getTopic(),
                new SimpleStringSchema(),
                props));

        // Transform: parse each raw message into a JSONObject.
        DataStream<JSONObject> transStream = stream.map((MapFunction<String, JSONObject>) value -> {
            // Use the logger instead of System.out so output honours the log config.
            logger.info("当前正在处理{}", value);
            return JSONObject.parseObject(value);
        });

        // Sink: write the parsed records into Elasticsearch.
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost(ConfigurationManager.getProperty("es_host"),
                ConfigurationManager.getInteger("es_port"), "http"));
        ElasticsearchSink.Builder<JSONObject> esSinkBuilder = new ElasticsearchSink.Builder<>(
                httpHosts,
                (ElasticsearchSinkFunction<JSONObject>) (jsonObject, runtimeContext, requestIndexer) -> {
                    // Index the record under the configured index/type, keyed by its "id" field.
                    // Serialize explicitly as JSON rather than relying on JSONObject's Map view,
                    // so the content type sent to ES is unambiguous.
                    requestIndexer.add(Requests.indexRequest()
                            .index(ConfigurationManager.getProperty("es_index"))
                            .type(ConfigurationManager.getProperty("es_type"))
                            .id(String.valueOf(jsonObject.getLong("id")))
                            .source(jsonObject.toJSONString(), XContentType.JSON));
                }
        );
        // Flush to ES after every 50 buffered actions.
        esSinkBuilder.setBulkFlushMaxActions(50);

        transStream.addSink(esSinkBuilder.build());

        // Execute the job; any failure is logged rather than propagated.
        try {
            senv.execute("FlinkSinkToES");
        } catch (Exception e) {
            logger.error("An error occurred.", e);
        }
    }

    /**
     * Creates the configured index with its mapping if it does not already exist.
     * Acquires a short-lived {@link TransportClient} and always releases it, even
     * when index creation fails part-way through.
     */
    private static void createIndex() {
        String index = ConfigurationManager.getProperty("es_index");
        String typename = ConfigurationManager.getProperty("es_type");
        TransportClient client = ElasticSearchUtils.getClient();
        try {
            IndicesAdminClient indicesAdminClient = client.admin().indices();
            IndicesExistsResponse response = indicesAdminClient.prepareExists(index).get();
            if (response.isExists()) {
                logger.info("index {} already exists", index);
                return;
            }
            // Only build the mapping when we actually need to create the index.
            XContentBuilder builder = buildMapping();
            // Create the index, then attach the mapping to it.
            indicesAdminClient.prepareCreate(index).get();
            indicesAdminClient.preparePutMapping(index).setType(typename)
                    .setSource(builder).get();
            logger.info("index {} created", index);
        } catch (IOException e) {
            logger.error("An error occurred.", e);
        } finally {
            // Always close the client so the transport connection is not leaked,
            // even if an exists/create/put-mapping call throws.
            client.close();
        }
    }

    /**
     * Builds the document mapping for the customer index: a numeric id, two
     * ik_max_word-analyzed text fields (name, description) and a nested
     * address object with keyword country/city fields.
     *
     * @return the mapping as an {@link XContentBuilder}
     * @throws IOException if the JSON builder fails
     */
    private static XContentBuilder buildMapping() throws IOException {
        return XContentFactory.jsonBuilder()
                .startObject()
                .startObject("properties")
                .startObject("id")
                .field("type", "long")
                .endObject()
                .startObject("name")
                .field("type", "text")
                .field("analyzer", "ik_max_word")
                .endObject()
                .startObject("address")
                .startObject("properties")
                .startObject("id")
                .field("type", "integer")
                .endObject()
                .startObject("country")
                .field("type", "keyword")
                .endObject()
                .startObject("city")
                .field("type", "keyword")
                .endObject()
                .endObject()
                .endObject()
                .startObject("description")
                .field("type", "text")
                .field("analyzer", "ik_max_word")
                .endObject()
                .endObject()
                .endObject();
    }

}
