package com.my;

import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink;
import org.apache.flink.streaming.connectors.elasticsearch7.RestClientFactory;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.common.xcontent.XContentType;

import java.util.ArrayList;
import java.util.Date;

/**
 * Flink streaming job: reads newline-delimited strings from a TCP socket and
 * writes each line into the Elasticsearch index {@code person}, using the line
 * itself as both the document id and the {@code name} field.
 *
 * <p>Usage: {@code App3 [socketHost [socketPort [esHost [esPort]]]]} — all
 * arguments are optional and default to the original hard-coded values, so
 * existing invocations keep working unchanged.
 */
public class App3 {
    public static void main(String[] args) throws Exception {
        // Connection targets; overridable from the command line, defaults preserved.
        final String socketHost = args.length > 0 ? args[0] : "192.168.80.101";
        final int socketPort = args.length > 1 ? Integer.parseInt(args[1]) : 7778;
        final String esHost = args.length > 2 ? args[2] : "192.168.80.106";
        final int esPort = args.length > 3 ? Integer.parseInt(args[3]) : 9200;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallel instance: keeps ordering simple for this demo job.
        env.setParallelism(1);

        // Source: one record per line received on the socket.
        DataStreamSource<String> streamSource = env.socketTextStream(socketHost, socketPort);

        // Sink operator: index each element into Elasticsearch.
        ArrayList<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost(esHost, esPort, "http"));
        ElasticsearchSinkFunction<String> elasticsearchSinkFunction = new ElasticsearchSinkFunction<String>() {
            @Override
            public void process(String element, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
                System.out.println(element); // debug: echo each element being indexed
                JSONObject jsonObject = new JSONObject();
                jsonObject.put("name", element);
                jsonObject.put("age", 10);
                // NOTE(review): java.util.Date kept on purpose — switching to java.time
                // would change fastjson's serialized representation of this field.
                jsonObject.put("birthday", new Date());
                // Document id == element, so re-sending the same line upserts the doc
                // instead of creating duplicates.
                IndexRequest source = Requests.indexRequest()
                        .index("person")
                        .id(element)
                        .source(jsonObject, XContentType.JSON);
                requestIndexer.add(source);
            }
        };

        ElasticsearchSink.Builder<String> esSinkBuilder = new ElasticsearchSink.Builder<>(httpHosts, elasticsearchSinkFunction);
        // Flush after every single document (demo setting; raise for throughput).
        esSinkBuilder.setBulkFlushMaxActions(1);
        // Also flush at least once per second regardless of batch size.
        esSinkBuilder.setBulkFlushInterval(1000);

        ElasticsearchSink<String> esSink = esSinkBuilder.build();

        streamSource.addSink(esSink).name("sink_es");
        env.execute("mysql_es");
    }
}
