package com.deep.flink.mongodb;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mongodb.MongoDBSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.elasticsearch7.shaded.org.apache.http.HttpHost;
import org.apache.flink.elasticsearch7.shaded.org.apache.http.auth.AuthScope;
import org.apache.flink.elasticsearch7.shaded.org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.flink.elasticsearch7.shaded.org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.flink.elasticsearch7.shaded.org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.ElasticsearchParseException;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.action.ActionRequest;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.action.delete.DeleteRequest;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.action.update.UpdateRequest;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.client.Requests;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.client.RestClientBuilder;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.apache.flink.elasticsearch7.shaded.org.elasticsearch.common.xcontent.XContentType;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.elasticsearch.ActionRequestFailureHandler;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink;
import org.apache.flink.util.ExceptionUtils;
import org.bson.Document;
import org.bson.json.JsonMode;
import org.bson.json.JsonWriterSettings;

import java.nio.charset.StandardCharsets;
import java.util.*;

import static com.deep.flink.mongodb.Config.*;

/**
 * Flink job that tails a MongoDB change stream (via Flink CDC) and mirrors the
 * collection into an Elasticsearch 7 index.
 *
 * <p>Pipeline: MongoDB CDC source → JSON sanity filter → oversized-field stripper
 * → Elasticsearch sink (insert/update/replace → index/update request, everything
 * else → delete request).
 */
public class FlinkCdcSyn_API {

    /**
     * If the serialized {@code fullDocument} exceeds this many UTF-8 bytes we start
     * dropping oversized fields: Elasticsearch rejects keyword values whose UTF-8
     * encoding is longer than 32766 bytes ("max length 32766" errors).
     */
    private static final int MAX_DOC_BYTES = 36000;

    /** Individual entries larger than this (UTF-8 bytes) are removed from the document. */
    private static final int MAX_FIELD_BYTES = 30000;

    public static void main(String[] args) throws Exception {
        // 1. Build the Flink environment and configure checkpointing.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setBufferTimeout(BUFFER_TIMEOUT_MS);
        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS, CheckpointingMode.AT_LEAST_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(CHECKPOINT_TIMEOUT_MS);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(CHECKPOINT_MIN_PAUSE_MS);

        // 2. MongoDB CDC source emitting change-stream events as JSON strings.
        SourceFunction<String> mongoDBSourceFunction = MongoDBSource.<String>builder()
                .hosts(MONGODB_URL)
                .username(MONGODB_USER)
                .password(MONGODB_PWD)
                .databaseList(MONGODB_DATABASE)
                .collectionList(MONGODB_COLLECTION)
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        // 3. Pre-processing: drop records that are not valid JSON, then strip
        //    oversized fields so ES does not reject the document (see MAX_DOC_BYTES).
        SingleOutputStreamOperator<String> stream = env.addSource(mongoDBSourceFunction)
                .setParallelism(1)
                .name("mongo_to_es")
                .filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String s) throws Exception {
                        // Keep only records that parse as JSON objects.
                        try {
                            JSON.parseObject(s);
                            return true;
                        } catch (Exception e) {
                            System.out.println("json格式错误："+ s) ;
                            return false;
                        }
                    }
                }).map(new MapFunction<String, String>() {
                    @Override
                    public String map(String s) throws Exception {
                        JSONObject obj = JSON.parseObject(s);
                        String op = obj.getString("operationType");

                        // "replace" also carries a fullDocument (MongoDB change
                        // streams emit it for whole-document replacements).
                        if ("insert".equals(op) || "update".equals(op) || "replace".equals(op)) {
                            JSONObject doc = obj.getJSONObject("fullDocument");
                            // fullDocument may be absent (e.g. update events without
                            // full-document lookup) — guard against NPE.
                            if (doc != null
                                    && doc.toString().getBytes(StandardCharsets.UTF_8).length > MAX_DOC_BYTES) {
                                // Remove individual oversized entries; otherwise ES fails with
                                // "whose UTF8 encoding is longer than the max length 32766".
                                Iterator<Map.Entry<String, Object>> it = doc.entrySet().iterator();
                                while (it.hasNext()) {
                                    if (it.next().toString()
                                            .getBytes(StandardCharsets.UTF_8).length > MAX_FIELD_BYTES) {
                                        it.remove();
                                    }
                                }
                                obj.fluentPut("fullDocument", doc.toString());
                            }
                        }
                        return obj.toString();
                    }
                });

        // 4. Elasticsearch sink: translate each change event into the matching request.
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost(ES_URL, ES_PORT, "http"));
        ElasticsearchSink.Builder<String> esSinkBuilder = new ElasticsearchSink.Builder<String>(
                httpHosts, new ElasticsearchSinkFunction<String>() {

            /**
             * Extracts the Mongo document id from {@code documentKey}: first the
             * ObjectId form ({@code {"_id": {"$oid": ...}}}), then a plain string
             * {@code _id}. Returns null when neither form is present.
             */
            private String extractId(JSONObject obj) {
                try {
                    return obj.getJSONObject("documentKey").getJSONObject("_id").getString("$oid");
                } catch (Exception e) {
                    try {
                        return obj.getJSONObject("documentKey").getString("_id");
                    } catch (Exception ex) {
                        System.out.println("格式不对：" + obj);
                        return null;
                    }
                }
            }

            /**
             * Drops {@code _id} (it becomes the ES document id instead) and converts
             * Mongo extended JSON ({@code $date}, {@code $numberLong}, ...) to relaxed
             * JSON that ES can ingest. Returns null when {@code data} is null.
             */
            private String toRelaxedJson(JSONObject data) {
                if (data == null) {
                    return null;
                }
                data.remove("_id");
                return Document.parse(data.toString())
                        .toJson(JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build());
            }

            /**
             * Builds the ES request for one change event, or null when the event
             * cannot be translated (missing fullDocument).
             */
            public ActionRequest createIndexRequest(String element) {
                JSONObject obj = JSON.parseObject(element);
                String op = obj.getString("operationType");
                String id = extractId(obj);

                if ("insert".equals(op)) {
                    String source = toRelaxedJson(obj.getJSONObject("fullDocument"));
                    if (source == null) {
                        return null;
                    }
                    return Requests.indexRequest()
                            .index(ES_INDEX)
                            .id(id)
                            .source(source, XContentType.JSON);
                } else if ("update".equals(op) || "replace".equals(op)) {
                    // NOTE(review): "replace" previously fell through to the delete
                    // branch and erased the ES document; it is treated as an update
                    // here — confirm against the connector's event taxonomy.
                    String source = toRelaxedJson(obj.getJSONObject("fullDocument"));
                    if (source == null) {
                        return null;
                    }
                    return new UpdateRequest(ES_INDEX, id).doc(source, XContentType.JSON).retryOnConflict(3);
                } else {
                    // Everything else (notably "delete") removes the document.
                    return new DeleteRequest(ES_INDEX, id);
                }
            }

            @Override
            public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
                ActionRequest request = createIndexRequest(element);
                if (request != null) {
                    indexer.add(request);
                }
            }
        });
        // Flush every action immediately; also flush at least every 2s.
        esSinkBuilder.setBulkFlushMaxActions(1);
        esSinkBuilder.setRestClientFactory(restClientBuilder -> {
            restClientBuilder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
                @Override
                public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpAsyncClientBuilder) {
                    // Basic-auth credentials for the ES REST client.
                    BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
                    credentialsProvider.setCredentials(AuthScope.ANY,
                            new UsernamePasswordCredentials(ES_USER, ES_PWD));
                    return httpAsyncClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
                }
            });
        });
        esSinkBuilder.setBulkFlushInterval(2000);

        // 5. Failure handling: re-queue documents rejected because ES was busy,
        //    drop malformed documents, and log (without failing the sink) all others.
        esSinkBuilder.setFailureHandler(new ActionRequestFailureHandler() {
            @Override
            public void onFailure(ActionRequest action, Throwable failure, int restStatusCode,
                                  RequestIndexer indexer) throws Throwable {
                if (ExceptionUtils.findThrowable(failure, EsRejectedExecutionException.class).isPresent()) {
                    // Full queue — re-add the document for indexing.
                    indexer.add(action);
                } else if (ExceptionUtils.findThrowable(failure, ElasticsearchParseException.class).isPresent()) {
                    // Malformed document — drop the request without failing the sink.
                } else {
                    // Deliberately best-effort: log the failed request and continue.
                    System.out.println("失败语句："+action.toString());
                }
            }
        });
        stream.addSink(esSinkBuilder.build());

        env.execute();
    }
}
