package study.bigdata.flink.state;

import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.OperatorStateStore;
import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;

import java.util.concurrent.TimeUnit;

@Slf4j
public class CkpointSourceFunction extends RichSourceFunction<String> implements CheckpointedFunction {

    /** Number of demo elements this source emits ("0" .. "999"). */
    private static final int SOURCE_SIZE = 1000;

    /*
     * Must start true: cancel() flips it to false. The original initialized it to false,
     * which made the while (running) loop in run() exit immediately and emit nothing.
     */
    private volatile boolean running = true;

    /* In-memory data the source replays. */
    private String[] sourceStrArray;

    /*
     * cursor holds the index of the last element already emitted, analogous to a Kafka
     * offset or a Mongo resume token. null means "nothing processed yet" — this lets
     * restoreOffsetState() distinguish a fresh start from a duplicate restore.
     */
    private Integer cursor = null;

    /* Operator state that persists the cursor across checkpoints and restores. */
    private transient ListState<int[]> offsetState;

    @Override
    public void open(Configuration parameters) throws Exception {
        log.info(">>>>>> open");
        sourceStrArray = new String[SOURCE_SIZE];
        for (int i = 0; i < SOURCE_SIZE; i++) {
            sourceStrArray[i] = String.valueOf(i);
        }
        // Log the size, not the array itself: passing a String[] to SLF4J's varargs
        // parameter spreads it into 1000 arguments instead of formatting it.
        log.info("source array init finish, size={}", sourceStrArray.length);
    }

    /**
     * Emits one element per second, resuming after the last checkpointed cursor.
     * Emission and cursor update happen under the checkpoint lock so that a snapshot
     * never observes a cursor that is ahead of (or behind) what was actually emitted.
     */
    @Override
    public void run(SourceContext<String> ctx) throws Exception {
        log.info(">>>>>> run");
        // cursor is the last index already processed, so resume at cursor + 1.
        int i = (cursor == null) ? 0 : cursor + 1;
        while (running && i < SOURCE_SIZE) {
            synchronized (ctx.getCheckpointLock()) {
                ctx.collect(sourceStrArray[i]);
                cursor = i;
            }
            i++;
            TimeUnit.SECONDS.sleep(1);
        }
    }

    @Override
    public void cancel() {
        running = false;
    }

    /**
     * ==================================================
     * Persists the current cursor into operator state. Without this write the
     * checkpoint would contain nothing and restores would always start from zero.
     * ==================================================
     */
    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        log.info(">>>>>>>>>> snapshotState, checkpointId={}, checkpointTs={}",
                context.getCheckpointId(), context.getCheckpointTimestamp());
        offsetState.clear();
        if (cursor != null) {
            offsetState.add(new int[]{cursor});
        }
    }

    /**
     * ==================================================
     * 初始化状态 — registers the offset state descriptor and, on a restore,
     * re-reads the last checkpointed cursor.
     * ==================================================
     */
    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        log.info(">>>>>>>>>> initializeState initializeState initializeState");
        // State descriptor (typed — the original used the raw ListStateDescriptor type).
        ListStateDescriptor<int[]> descriptor = new ListStateDescriptor<>(
                "cktest_" + this.getClass().getName(),
                PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO);
        // State store backing this operator.
        OperatorStateStore stateStore = context.getOperatorStateStore();
        offsetState = stateStore.getListState(descriptor);

        if (context.isRestored()) {
            // The original left this branch empty, so restore never happened.
            restoreOffsetState();
        }
    }

    /**
     * Reads the checkpointed cursor back. Expects exactly one entry because this
     * demo supports a single task; more than one restored entry is an error.
     */
    private void restoreOffsetState() throws Exception {
        for (int[] serializedOffset : offsetState.get()) {
            if (cursor == null) {
                // cursor stores the last processed index (like a Kafka offset / Mongo resume token).
                cursor = serializedOffset[0];
            } else {
                throw new RuntimeException("only support single task, " +
                        "however, this is restored from multiple tasks.");
            }
        }
        log.info("source subtask {} restored offset state: {}.", getRuntimeContext().getIndexOfThisSubtask(), cursor);
    }
}
