package com.sugon.ohdfs.integration.flink.job.stream.sink;

import com.sugon.ohdfs.integration.flink.domain.TestItem;
import com.sugon.ohdfs.integration.flink.service.TestItemSource;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import javax.annotation.PostConstruct;
import java.io.Serializable;
import java.time.LocalDateTime;

/**
 * Base class for streaming sink jobs. On Spring context initialization it wires a
 * Flink streaming pipeline: {@link TestItemSource} -> {@link #transform(TestItem)}
 * -> the sink built by {@link #buildSink()}, with exactly-once checkpointing
 * enabled so the file sink can complete in-progress files after failures.
 *
 * <p>Serializable because the {@code map} lambda in {@link #init()} captures
 * {@code this}, and Flink serializes the closure when distributing the job.
 */
public abstract class AbstractStreamSinkJob implements Serializable {

    // Explicit serialVersionUID: without it, any recompile changes the implicit
    // UID and breaks deserialization of the captured closure (Effective Java, Item 87).
    private static final long serialVersionUID = 1L;

    /** Checkpoint interval in milliseconds. */
    @Value("${flink.checkpoint.interval:60000}")
    long checkPointInterval;

    /** HDFS URI used as checkpoint storage for the state backend. */
    @Value("${hdfs.checkpoint.path:hdfs://10.11.4.69:8020/flink/sinkTest/checkpoint}")
    String checkPointFilePath;

    // NOTE(review): injected but never referenced in this class — presumably used
    // by subclasses or external savepoint submission; confirm before removing.
    @Value("${hdfs.savepoint.path:hdfs://10.11.4.69:8020/flink/sinkTest/savepoint}")
    String savePointFilePath;

    @Autowired
    TestItemSource testItemSource;

    /**
     * Serializes or converts the item into an object suited to the sink's encoder,
     * so it can be persisted.
     *
     * @param item the source item to convert
     * @return the sink-ready representation of {@code item}
     */
    protected abstract Serializable transform(TestItem item);

    /**
     * Builds the sink consumer used to persist the data stream.
     *
     * @return the configured {@link StreamingFileSink}
     */
    protected abstract StreamingFileSink buildSink();

    /**
     * Assembles and runs the streaming pipeline.
     *
     * <p>NOTE(review): {@code env.execute} blocks the calling thread until the job
     * terminates, so this {@code @PostConstruct} method will stall Spring context
     * startup — confirm this is intended (e.g. a standalone demo job).
     *
     * @throws Exception if the Flink job fails to start or execute
     */
    @PostConstruct
    public void init() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Streaming file sinks require checkpointing and a state backend; otherwise
        // in-progress files may be lost and never transition to "complete" after a failure.
        env.enableCheckpointing(checkPointInterval, CheckpointingMode.EXACTLY_ONCE);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage(checkPointFilePath);

        // Load the data.
        DataStreamSource<TestItem> source = env.addSource(testItemSource);

        // Stamp processing time, transform, and persist.
        source.map(item -> {
            item.setProcessTimeStamp(LocalDateTime.now());
            return transform(item);
        }).addSink(buildSink());

        // Start the job. Fix: pass the job name itself — the previous
        // "job %s started" format string was a log-style sentence, which became
        // the Flink job's display name ("job Foo started").
        env.execute(jobName());
    }

    /**
     * @return the job name shown in the Flink UI; defaults to the concrete class's simple name
     */
    public String jobName() {
        return getClass().getSimpleName();
    }
}
