package com.patsnap.data.npd.dw.etl.job.ods.job;

import com.patsnap.data.npd.dw.etl.job.ods.dispatcher.DataLayer;
import com.patsnap.data.npd.dw.etl.job.ods.serialization.PreOdsInfo;
import com.patsnap.data.npd.dw.etl.job.ods.sink.OdsNpdRecordSink;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink streaming job that incrementally ingests journal records from a Kafka
 * source and writes them to the ODS layer via {@link OdsNpdRecordSink}.
 *
 * <p>Configuration (via program args merged with Apollo config):
 * <ul>
 *   <li>{@code check_point_is} (CHECK_POINT_IS) — enable checkpointing; defaults to {@code true}</li>
 *   <li>{@code sink_parallelism} — parallelism of the sink operator; defaults to 3</li>
 *   <li>{@code job_name} — Flink job name; defaults to {@code "Journal Ods Job"}</li>
 * </ul>
 */
public class OdsJournalIncrementJob extends OdsFlinkJobLauncher {

    /** Name of the Kafka source used to read journal records. */
    private static final String KAFKA_SOURCE_NAME_JOURNAL = "source_journal";

    /** Default sink parallelism when {@code sink_parallelism} is not configured. */
    private static final int DEFAULT_SINK_PARALLELISM = 3;

    /**
     * Builds and executes the journal-increment pipeline:
     * Kafka source → keyBy record key → ODS sink.
     *
     * <p>NOTE(review): the method name "laucher" is a misspelling of "launcher",
     * but it overrides the superclass contract, so it cannot be renamed here
     * without coordinating a change in {@code OdsFlinkJobLauncher} and all siblings.
     *
     * @param args program arguments, merged with Apollo config
     * @throws Exception if environment setup or job execution fails
     */
    @Override
    protected void laucher(String[] args) throws Exception {
        ParameterTool parameterTool = mergeApolloConfig(ParameterTool.fromArgs(args));
        StreamExecutionEnvironment env = getExecutionEnvironment(1, parameterTool, args, OdsJournalIncrementJob.class.getName());

        // Checkpointing is on by default; can be disabled via config.
        if (Boolean.parseBoolean(parameterTool.get(CHECK_POINT_IS, "true"))) {
            enableCheckpointing(env, parameterTool);
        }

        generateSource(env, parameterTool, KAFKA_SOURCE_NAME_JOURNAL)
                .keyBy(PreOdsInfo::getKeyBy)
                .addSink(new OdsNpdRecordSink(DataLayer.ODS))
                // BUG FIX: the key was previously the empty string "", so the
                // configured value could never be read and parallelism was
                // always 3. Default is kept at 3 for backward compatibility.
                .setParallelism(parameterTool.getInt("sink_parallelism", DEFAULT_SINK_PARALLELISM))
                .name("etl-to-db");
        env.execute(parameterTool.get("job_name", "Journal Ods Job"));
    }

    public static void main(String[] args) throws Exception {
        OdsJournalIncrementJob odsJournalIncrementJob = new OdsJournalIncrementJob();
        odsJournalIncrementJob.laucher(args);
    }
}
