package com.dtwave.cfstreaming.sink;

import com.dtwave.cfstreaming.bean.ResultData;
import com.dtwave.cfstreaming.connection.ElasticConnector;
import com.dtwave.cfstreaming.constant.SQLFileConstants;
import com.dtwave.cfstreaming.process.StreamDataExecutor;
import com.dtwave.cfstreaming.sink.function.EsSinkFunction;
import com.dtwave.cfstreaming.sink.function.SinkFlatMap;
import com.dtwave.cfstreaming.utils.FileUtils;
import com.dtwave.param.EsparamObj;
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class StreamDataSink {

    /** Static utility class; not meant to be instantiated. */
    private StreamDataSink() {
    }

    /**
     * Loads the result SQL, runs it against the table environment and sinks the
     * resulting retract stream into Elasticsearch.
     *
     * @param tableEnv   the Flink stream table environment the query runs on
     * @param esParamObj Elasticsearch connection and bulk-flush parameters
     */
    public static void execute(StreamTableEnvironment tableEnv, EsparamObj esParamObj) {
        // Build the Elasticsearch sink builder used to emit the output data.
        ElasticsearchSink.Builder<ResultData> esBuilder = getEsBuilder(esParamObj);

        // Load the result SQL from the bundled file and execute the query.
        String sql = FileUtils.getSqlStream(StreamDataExecutor.class, SQLFileConstants.RESULT_SQL_FILE);
        Table tableResult = tableEnv.sqlQuery(sql);

        // Converting the table to a retract stream yields Tuple2<Boolean, ResultData>:
        // f0 is the add/retract flag (true = insert), f1 is the actual row.
        // SinkFlatMap is expected to filter on that flag before the ES sink.
        tableEnv.toRetractStream(tableResult, ResultData.class)
                .flatMap(new SinkFlatMap())
                .addSink(esBuilder.build())
                .name("SINK TEST");
    }

    /**
     * Builds the {@link ElasticsearchSink.Builder} configured with the flush
     * parameters required to sink data from Flink to Elasticsearch.
     *
     * @param esParamObj Elasticsearch connection and bulk-flush parameters
     * @return a configured sink builder for {@link ResultData}
     */
    private static ElasticsearchSink.Builder<ResultData> getEsBuilder(EsparamObj esParamObj) {
        ElasticsearchSink.Builder<ResultData> builder = new ElasticsearchSink.Builder<>(
                ElasticConnector.parse(esParamObj.getEsUrl()),
                new EsSinkFunction(esParamObj)
        );

        // Bulk flush tuning: flush interval (ms), max buffered actions, backoff retries.
        // setBulkFlushInterval accepts a long, so parse the interval as long.
        // NOTE(review): these parameters arrive as Strings; parsing throws
        // NumberFormatException on malformed config — consider validating upstream.
        builder.setBulkFlushInterval(Long.parseLong(esParamObj.getEsIntervalMs()));
        builder.setBulkFlushMaxActions(Integer.parseInt(esParamObj.getEsMaxAction()));
        builder.setBulkFlushBackoffRetries(Integer.parseInt(esParamObj.getEsMaxReties()));
        return builder;
    }
}
