package org.chenbingkang.flink;

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.http.HttpHost;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class Application {

    private static final Logger logger = LoggerFactory.getLogger(Application.class);

    /**
     * Streams string records from a Kafka topic and indexes each record as a
     * JSON document into Elasticsearch.
     *
     * <p>Pipeline: Kafka topic {@code "test"} → Elasticsearch index
     * {@code "test"} (type {@code "doc"}).
     *
     * @param args unused
     * @throws Exception if the Flink job cannot be built or fails during execution
     */
    public static void main(String... args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.180.128:9092");
        props.put("group.id", "test");
        // NOTE(review): the Flink Kafka connector ignores key/value deserializer
        // settings — record deserialization is done by the SimpleStringSchema
        // passed to the consumer below. Kept for documentation only.
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "latest");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Was enableCheckpointing(12): a checkpoint barrier every 12 ms would
        // swamp the job with checkpoint overhead — almost certainly a typo.
        // 5 seconds is a reasonable interval for this demo pipeline.
        env.enableCheckpointing(5_000L);

        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer011<>(
                "test",                    // Kafka topic
                new SimpleStringSchema(),  // deserialize each record as a UTF-8 String
                props)).setParallelism(1);

        // (Removed a dead anonymous RichMapFunction here: it mapped every
        // element to null and its output stream was never consumed.)

        List<HttpHost> esAddresses = new ArrayList<>();
        esAddresses.add(new HttpHost("192.168.180.128", 9200));

        ElasticsearchSink.Builder<String> esSinkBuilder = new ElasticsearchSink.Builder<>(
                esAddresses,
                (String element, RuntimeContext runtimeContext, RequestIndexer requestIndexer) -> {
                    // logger is static, so the lambda stays serializable.
                    logger.info("indexing element: {}", element);
                    requestIndexer.add(Requests.indexRequest()
                            .index("test")
                            .type("doc")
                            .source(element, XContentType.JSON));
                });
        // Flush after every single action so records show up in Elasticsearch
        // immediately. Demo setting — raise it for throughput in production.
        esSinkBuilder.setBulkFlushMaxActions(1);

        dataStreamSource.addSink(esSinkBuilder.build());

        env.execute("Flink add data source");
    }
}
