package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Flink CDC实时采集MySQL表数据
 * @author xuanyu
 * @date 2025/10/28
 */
public class _07CdcMysqlDemo {

	/**
	 * Entry point: builds the CDC pipeline and blocks in {@code env.execute}.
	 *
	 * @param args unused command-line arguments
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception {
		// 1. Execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 2. Source: MySQL CDC
		/*
		https://nightlies.apache.org/flink/flink-cdc-docs-release-3.5/docs/connectors/flink-sources/mysql-cdc/#datastream-source
		 */
		// Checkpointing is required for the CDC source to commit binlog offsets.
		env.enableCheckpointing(3000);
		MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
			.hostname("node101")
			.port(3306)
			.databaseList("flink_day12")
			// FIX: tableList() was called with no arguments, leaving the table
			// include-list empty. List exactly the four tables the routing
			// logic below expects (names must be qualified as db.table).
			.tableList(
				"flink_day12.order_info",
				"flink_day12.order_detail",
				"flink_day12.comment_info",
				"flink_day12.payment_info"
			)
			.username("root")
			.password("123456")
			// initial() = full snapshot first, then incremental binlog reading
			.startupOptions(StartupOptions.initial())
			.deserializer(new JsonDebeziumDeserializationSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(
			mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source"
		);
		//stream.print("cdc");

		// 3. Transformation
		/*
			Split the captured CDC records by source table using side outputs,
			then store each table's records in its own Kafka topic for
			downstream processing.
				todo order_info is the main stream; all others are side outputs
		 */
		final OutputTag<String> detailTag = new OutputTag<String>("order_detail"){};
		final OutputTag<String> commentTag = new OutputTag<String>("comment_info"){};
		final OutputTag<String> paymentTag = new OutputTag<String>("payment_info"){};

		SingleOutputStreamOperator<String> orderInfoStream = stream.process(
			new ProcessFunction<String, String>() {
				/**
				 * Routes each Debezium JSON record by its source table name:
				 * order_info to the main output, the other tables to their
				 * respective side outputs. Records without a "source" block
				 * are dropped.
				 */
				@Override
				public void processElement(String value,
				                           Context ctx,
				                           Collector<String> out) throws Exception {
					// Parse the Debezium envelope; "source" holds binlog metadata
					// including the originating table name.
					JSONObject source = JSON.parseObject(value).getJSONObject("source");
					if (source == null) {
						// FIX: guard against records lacking a "source" block
						// (previously an unconditional NPE would fail the job).
						return;
					}
					String table = source.getString("table");
					// Main output: order_info
					if ("order_info".equals(table)) {
						out.collect(value);
					}
					// Side outputs: order_detail, comment_info, payment_info
					if ("order_detail".equals(table)) {
						ctx.output(detailTag, value);
					}
					if ("comment_info".equals(table)) {
						ctx.output(commentTag, value);
					}
					if ("payment_info".equals(table)) {
						ctx.output(paymentTag, value);
					}
				}
			}
		);

		// Retrieve the side-output streams
		SideOutputDataStream<String> orderDetailStream = orderInfoStream.getSideOutput(detailTag);
		SideOutputDataStream<String> commentInfoStream = orderInfoStream.getSideOutput(commentTag);
		SideOutputDataStream<String> paymentInfoStream = orderInfoStream.getSideOutput(paymentTag);

		// 4. Sink: one Kafka topic per table
		saveToKafka(orderInfoStream, "order_info");
		saveToKafka(orderDetailStream, "order_detail");
		saveToKafka(commentInfoStream, "comment_info");
		saveToKafka(paymentInfoStream, "payment_info");

		// 5. Trigger execution
		env.execute("GmallCdcJob");
	}

	/**
	 * Attaches an at-least-once Kafka sink to the given stream, writing each
	 * record's raw JSON string to the specified topic.
	 *
	 * @param stream the stream of JSON records to persist
	 * @param topic  the target Kafka topic name
	 */
	private static void saveToKafka(DataStream<String> stream, String topic) {
		KafkaSink<String> sink = KafkaSink.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setRecordSerializer(KafkaRecordSerializationSchema.builder()
				.setTopic(topic)
				.setValueSerializationSchema(new SimpleStringSchema())
				.build()
			)
			// AT_LEAST_ONCE: duplicates possible on failure/restart, but no
			// transactional-id setup is required (unlike EXACTLY_ONCE).
			.setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
			.build();
		stream.sinkTo(sink);
	}

}
