package liveData.migration.toPhoenix.util;

import com.alibaba.fastjson.JSON;
import liveData.migration.toPhoenix.entity.FlatMessage;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

/**
 * @program: userPortrait
 * @description:
 * @author: sunteng
 * @create: 2020-07-22 17:57
 **/
/**
 * Shared Flink job configuration helpers: event-time timestamp/watermark
 * assignment for Kafka consumers and common checkpoint settings.
 *
 * <p>Stateless utility class — all members are static; not instantiable.
 */
public final class FlinkSettings {

    private FlinkSettings() {
        // Utility class; prevent instantiation.
    }

    /**
     * Assigns ascending event-time timestamps to a raw-record consumer by
     * parsing each record's string form into a {@link FlatMessage} and using
     * its {@code es} field as the timestamp (presumably epoch millis from the
     * canal message — TODO confirm).
     *
     * <p>Falls back to the current wall-clock time when the element or the
     * parsed message is {@code null}, matching {@link #assignTwFlat}.
     *
     * @param consumer the Kafka consumer to configure (raw type kept for
     *                 caller compatibility)
     */
    public static void assignTw(FlinkKafkaConsumer consumer) {
        consumer.assignTimestampsAndWatermarks(new AscendingTimestampExtractor<Object>() {

            @Override
            public long extractAscendingTimestamp(Object element) {
                // fastjson returns null for empty/"null" input; the original
                // code would NPE here. Guard like assignTwFlat does instead.
                FlatMessage message = element == null ? null : parseJson(element.toString());
                if (message == null) {
                    return System.currentTimeMillis();
                }
                return message.getEs();
            }
        });
    }

    /**
     * Assigns ascending event-time timestamps to a consumer whose records are
     * already deserialized {@link FlatMessage} instances, using the message's
     * {@code es} field; {@code null} elements fall back to wall-clock time.
     *
     * @param consumer the Kafka consumer to configure (raw type kept for
     *                 caller compatibility)
     */
    public static void assignTwFlat(FlinkKafkaConsumer consumer) {
        consumer.assignTimestampsAndWatermarks(new AscendingTimestampExtractor<FlatMessage>() {

            @Override
            public long extractAscendingTimestamp(FlatMessage element) {
                if (element == null) {
                    return System.currentTimeMillis();
                }

                return element.getEs();
            }
        });
    }

    /**
     * Deserializes a JSON string into a {@link FlatMessage} via fastjson.
     *
     * <p>Note: fastjson returns {@code null} for empty or {@code "null"}
     * input and throws {@code JSONException} on malformed JSON.
     *
     * @param element JSON text of a flat canal message
     * @return the parsed message, or {@code null} if the input was empty
     */
    public static FlatMessage parseJson(String element) {
        return JSON.parseObject(element, FlatMessage.class);
    }

    /**
     * Applies the project's standard checkpoint policy to a job environment:
     * exactly-once checkpoints every 10 minutes, a 5-minute timeout, up to 3
     * concurrent checkpoints, tolerating 10 consecutive failures, and
     * preferring checkpoints over savepoints for recovery.
     *
     * @param env the stream execution environment to configure
     */
    public static void checkpointSettings(StreamExecutionEnvironment env) {
        env.enableCheckpointing(600000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(300000);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(3);
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(10);
        env.getCheckpointConfig().setPreferCheckpointForRecovery(true);
    }
}
