package pipelines;

import fragment.OracleSource;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.ExecutionException;

/**
 * --PARLLELISM 1 --ORACLE_HOST 10.1.10.135 --ORACLE_PORT 1521 --ORACLE_USER DAAS_ODS --ORACLE_PASSWORD YlYkalBDZngwTVVpbmc= --ORACLE_DATABASENAME ods --ORACLE_SCHEMANAME RTDM --ORACLE_TABLESNAME RTDM.TA_WX_RT_MULT_KX31_20214 --ACTIVE_SINKS iceberg,iceberg_delete --ICEBERG_USER root --ICEBERG_HIVE_META thrift://10.1.75.102:7004 --ICEBERG_DATABASE default --ICEBERG_TABLE_PATTERN ${table}_2 --ICEBERG_DELETED_TABLE_PATTERN ${table}_2_d
 */
public class OraclePipeline {
    // Fixed: was LoggerFactory.getLogger(MySQLPipeline.class) — a copy-paste bug
    // that attributed this job's log output to the MySQL pipeline.
    private static final Logger LOGGER = LoggerFactory.getLogger(OraclePipeline.class);

    /**
     * Entry point: builds a Flink streaming job that reads change data from an
     * Oracle source and fans it out to every sink configured via {@code ACTIVE_SINKS}.
     *
     * <p>Recognized arguments (see the class Javadoc for a full example):
     * <ul>
     *   <li>{@code --CHECKPOINTING} (ms, default 60000) — checkpoint interval;
     *       the legacy misspelling {@code --CHEECKPOINTING} is still accepted.</li>
     *   <li>{@code --PARALLELISM} (default -1 = cluster default);
     *       the legacy misspelling {@code --PARLLELISM} is still accepted.</li>
     * </ul>
     *
     * @param args CLI arguments parsed with {@link ParameterTool#fromArgs(String[])}
     * @throws ExecutionException   if sink submission fails downstream
     * @throws InterruptedException if job setup is interrupted
     */
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        ParameterTool pt = ParameterTool.fromArgs(args);

        // Prefer the corrected key spellings, but fall back to the historical
        // typo'd keys so existing launch scripts keep working unchanged.
        long checkpointPeriod =
                pt.getLong("CHECKPOINTING", pt.getLong("CHEECKPOINTING", 60000));
        int parallelism = pt.getInt("PARALLELISM", pt.getInt("PARLLELISM", -1));

        LOGGER.info(
                "Starting Oracle pipeline: checkpointPeriod={} ms, parallelism={}",
                checkpointPeriod,
                parallelism);

        Configuration configuration = new Configuration();
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        // Important!! Object reuse avoids per-record defensive copies between
        // chained operators; safe here because records are not mutated downstream
        // (NOTE(review): confirm no operator in the sink chain mutates inputs).
        env.getConfig().enableObjectReuse();
        env.enableCheckpointing(checkpointPeriod, CheckpointingMode.AT_LEAST_ONCE);
        if (parallelism > 0) {
            env.setParallelism(parallelism);
        }

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register the Oracle source table, then attach all configured sinks to it.
        String sourceName = new OracleSource().createConnector(pt, tableEnv);
        JobUtils.addAllSinks(sourceName, pt, tableEnv);
    }
}
