package com.yifeng.repo.flink.data.transport.bootstrap;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.ververica.cdc.connectors.oracle.source.OracleSourceBuilder;
import com.ververica.cdc.connectors.oracle.source.OracleSourceBuilder.OracleIncrementalSource;
import com.yifeng.repo.flink.data.transport.config.JobParametersConfig;
import com.yifeng.repo.flink.data.transport.config.SourceDbConfig;
import com.yifeng.repo.flink.data.transport.streaming.connectors.kafka.IncreKafkaOracleSerializationSchema;
import com.yifeng.repo.flink.data.transport.streaming.connectors.oracle.OracleDebeziumDeserializationSchema;
import com.yifeng.repo.flink.data.transport.utils.id.ObjectId;
import com.yifeng.repo.flink.data.transport.utils.kafka.KafkaUtil;

/**
 * Oracle CDC job built on the stock Flink CDC connector. Supports the
 * different StartupModes: full snapshot + incremental, or incremental-only
 * (earliest / latest / timestamp / specific offset).
 *
 * @author lijing
 * @since 2023-03-14
 *
 */
public class OracleFlinkCdcTask {

	/**
	 * Entry point: parses job parameters, builds the Oracle incremental
	 * source, wires it into a Kafka sink and submits the streaming job.
	 *
	 * @param args command-line arguments, parsed by {@link BootstrapHelper#setEnvAndParameter}
	 * @throws Exception if environment setup or job execution fails
	 */
	public static void main(String[] args) throws Exception {
		// Parse command-line args into the job-level configuration.
		JobParametersConfig config = BootstrapHelper.setEnvAndParameter(args);

		OracleIncrementalSource<String> oracleSource = buildOracleSource(config);

		StreamExecutionEnvironment env = BootstrapHelper.setEnvironment(config.getStreamExecutionConfig());

		// No event-time watermarks: records are consumed as raw change events.
		DataStreamSource<String> oracleSourceDataStream = env
				.fromSource(oracleSource, WatermarkStrategy.noWatermarks(), config.getCdcTaskname())
				.setParallelism(config.getSourceParallelism());

		attachKafkaSink(oracleSourceDataStream, config);

		env.execute(config.getCdcTaskname());
	}

	/**
	 * Builds the incremental Oracle CDC source from the job configuration.
	 *
	 * @param config the job configuration holding connection and startup options
	 * @return a configured {@link OracleIncrementalSource} emitting JSON strings
	 */
	private static OracleIncrementalSource<String> buildOracleSource(JobParametersConfig config) {
		SourceDbConfig sourceDbConfig = config.getSourceDbConfig();
		return new OracleSourceBuilder<String>()
				.hostname(sourceDbConfig.getHostname())
				.port(sourceDbConfig.getPort())
				.databaseList(sourceDbConfig.getDatabase())
				.schemaList(sourceDbConfig.getSchemaList())
				.tableList(sourceDbConfig.getTableList())
				.username(sourceDbConfig.getUsername())
				.password(sourceDbConfig.getPassword())
				.debeziumProperties(BootstrapHelper.oracleDebeziumProperties(config))
				.deserializer(new OracleDebeziumDeserializationSchema(
						config.isSchemaChangeToDownstream(), sourceDbConfig.getDatabase()))
				// Startup mode (snapshot+incremental / earliest / latest / timestamp / offset).
				.startupOptions(BootstrapHelper.startupOptions(config))
				.includeSchemaChanges(config.isIncludeSchemaChange())
				.build();
	}

	/**
	 * Attaches the Kafka sink to the CDC stream. A freshly generated
	 * transaction id is used as the sink operator's uid and embedded in
	 * its display name.
	 *
	 * @param stream the CDC source stream to sink into Kafka
	 * @param config the job configuration holding the Kafka settings
	 */
	private static void attachKafkaSink(DataStreamSource<String> stream, JobParametersConfig config) {
		// Generate a unique transaction id for this job submission.
		String transactionId = ObjectId.get().toString();
		stream.addSink(KafkaUtil.getKafkaBySchema(
						new IncreKafkaOracleSerializationSchema(
								config.getKafkaConfig().getPartitionSize(),
								config.getKafkaConfig().getTopic()),
						transactionId,
						config.getKafkaConfig()))
				.name(config.getCdcTaskname() + ":" + transactionId)
				.uid(transactionId)
				.setParallelism(config.getSinkParallelism());
	}

}
