package com.diver.flinkdemo;

import com.diver.flinkdemo.entity.DalData;
import com.diver.flinkdemo.entity.xxt.IssuedPermissionRecord;
import com.diver.flinkdemo.sink.IssuedPermissionRecordSink;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Date;

/**
 * Flink streaming job that consumes execution-status callback messages for
 * previously issued commands from Kafka and persists the TASK_TRACKING
 * records via {@link IssuedPermissionRecordSink}.
 *
 * @author lujw
 * @Date 2023/4/26
 * @desc "下发的指令的执行情况上报" (status report of issued-command execution)
 */
@Slf4j
public class IssueCommandCallBackJob {

    /** Default Kafka broker list; can be overridden by the first program argument. */
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "172.16.100.67:9092";
    private static final String TOPIC = "aiot-msg";
    private static final String GROUP_ID = "flinkdev";
    /** Checkpoint interval in milliseconds (60 s). */
    private static final long CHECKPOINT_INTERVAL_MS = 60_000L;

    /**
     * Entry point. Builds the pipeline: Kafka source -> parse/filter
     * TASK_TRACKING records -> sink.
     *
     * @param args optional; args[0] overrides the Kafka bootstrap servers
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // EXACTLY_ONCE checkpointing gives end-to-end consistency for job state.
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // NOTE: the old comment claimed "every 5s" but the value is 60000 ms (60 s).
        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);

        // Allow overriding the broker list from the command line for non-dev deployments.
        String bootstrapServers =
                (args.length > 0 && StringUtils.isNotBlank(args[0])) ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;

        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers(bootstrapServers)
                .setTopics(TOPIC)
                .setGroupId(GROUP_ID)
                // Start from the latest offset; historical records are not replayed.
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        DataStream<String> stream = env
                .fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source")
                .setParallelism(1);

        // Parse each message ONCE (the previous filter()+map() pair deserialized
        // every payload twice) and keep only TASK_TRACKING records. A malformed
        // message is logged and skipped instead of failing the job — otherwise a
        // poison record would crash the job and, after checkpoint restart, crash
        // it again in an endless loop.
        stream.flatMap(new FlatMapFunction<String, DalData>() {
            @Override
            public void flatMap(String s, Collector<DalData> out) {
                try {
                    DalData dalData = DalData.convert(s);
                    if ("TASK_TRACKING".equals(dalData.getType())) {
                        log.info("过滤出TASK_TRACKING类型的数据");
                        out.collect(dalData);
                    }
                } catch (Exception e) {
                    log.warn("Skipping unparseable message: {}", s, e);
                }
            }
        }).addSink(new IssuedPermissionRecordSink());

        // Propagate execution failures instead of swallowing them with
        // printStackTrace(): the launcher must see a non-zero exit.
        env.execute("IssueCommandCallBackJob");
    }
}
