package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import lombok.SneakyThrows;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/**
 * 采用interval join方式将order_info和order_detail表数据关联join
 * @author xuanyu
 * @date 2025/10/27
 */
/**
 * Joins the {@code order_info} and {@code order_detail} tables with a Flink
 * <em>interval join</em>. Both tables are ingested as Debezium-JSON change
 * streams via Flink CDC, keyed on the order id
 * ({@code order_info.id} &lt;-&gt; {@code order_detail.order_id}), and matched
 * within the event-time interval [info - 1s, info + 2s].
 *
 * <p>NOTE: when capturing MySQL tables with Flink CDC, avoid DECIMAL (use
 * DOUBLE instead) and DATETIME (use VARCHAR instead) column types in the
 * source schema.
 *
 * @author xuanyu
 * @date 2025/10/27
 */
public class _07FlinkIntervalJoinDemo {

	/**
	 * Pattern of the {@code create_time} column ("yyyy-MM-dd HH:mm:ss").
	 * DateTimeFormatter is immutable and thread-safe, so a single shared
	 * instance is fine (unlike the legacy SimpleDateFormat).
	 */
	private static final DateTimeFormatter CREATE_TIME_FORMAT =
		DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

	public static void main(String[] args) throws Exception {
		// 1. Execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);
		env.enableCheckpointing(3000);

		// 2. Sources: CDC change streams for the two tables
		//    (identical connection settings, only the table differs)
		DataStreamSource<String> infoStream = env.fromSource(
			buildMySqlSource("gmall.order_info"),
			WatermarkStrategy.noWatermarks(),
			"MySQL OrderInfo Source"
		);
		DataStreamSource<String> detailStream = env.fromSource(
			buildMySqlSource("gmall.order_detail"),
			WatermarkStrategy.noWatermarks(),
			"MySQL OrderDetailSource"
		);

		// 3-0. Assign event time from after.create_time on both streams
		SingleOutputStreamOperator<String> timeInfoStream =
			infoStream.assignTimestampsAndWatermarks(createTimeWatermarks());
		SingleOutputStreamOperator<String> timeDetailStream =
			detailStream.assignTimestampsAndWatermarks(createTimeWatermarks());

		// 3-1. Key each stream by its side of the join condition
		KeyedStream<String, Long> infoKeyedStream =
			timeInfoStream.keyBy(afterLongField("id"));
		KeyedStream<String, Long> detailKeyedStream =
			timeDetailStream.keyBy(afterLongField("order_id"));

		// 3-2 / 3-3. Interval join: an info record matches detail records whose
		// event time falls in [info - 1s, info + 2s]; merge each matched pair
		// into a single JSON object {"info": ..., "detail": ...}.
		SingleOutputStreamOperator<String> joinedStream = infoKeyedStream
			.intervalJoin(detailKeyedStream)
			.between(Time.seconds(-1), Time.seconds(2))
			.process(new ProcessJoinFunction<String, String, String>() {
				@Override
				public void processElement(String left,
				                           String right,
				                           Context ctx, Collector<String> out) throws Exception {
					// left = order_info record, right = order_detail record
					JSONObject object = new JSONObject();
					object.put("info", JSON.parseObject(left).getJSONObject("after"));
					object.put("detail", JSON.parseObject(right).getJSONObject("after"));
					out.collect(object.toJSONString());
				}
			});
		joinedStream.print("join");

		// 4. Trigger execution
		env.execute("FlinkIntervalJoinDemo");
	}

	/**
	 * Builds a MySQL CDC source for one table: full snapshot first
	 * ({@code StartupOptions.initial()}), then the binlog, emitting change
	 * events as Debezium JSON strings.
	 *
	 * @param table fully qualified table name, e.g. {@code "gmall.order_detail"}
	 * @return configured {@link MySqlSource}
	 */
	private static MySqlSource<String> buildMySqlSource(String table) {
		// WARNING: credentials are hard-coded for this demo; externalize them
		// (args/config/env) for anything beyond local testing.
		return MySqlSource.<String>builder()
			.hostname("node101")
			.port(3306)
			.databaseList("gmall")
			.tableList(table)
			.username("root")
			.password("123456")
			.startupOptions(StartupOptions.initial())
			.deserializer(new JsonDebeziumDeserializationSchema())
			.build();
	}

	/**
	 * Watermark strategy that extracts event time from the
	 * {@code after.create_time} field of a Debezium JSON change record,
	 * with zero out-of-orderness tolerance.
	 *
	 * @return watermark strategy shared by both table streams
	 */
	private static WatermarkStrategy<String> createTimeWatermarks() {
		return WatermarkStrategy
			.<String>forBoundedOutOfOrderness(Duration.ofSeconds(0))
			.withTimestampAssigner(new SerializableTimestampAssigner<String>() {
				@Override
				public long extractTimestamp(String element, long recordTimestamp) {
					String createTime = JSON.parseObject(element)
						.getJSONObject("after")
						.getString("create_time");
					// LocalDateTime + system default zone reproduces the epoch
					// millis SimpleDateFormat would have produced, without its
					// thread-safety pitfalls; parse failures throw the
					// unchecked DateTimeParseException.
					return LocalDateTime.parse(createTime, CREATE_TIME_FORMAT)
						.atZone(ZoneId.systemDefault())
						.toInstant()
						.toEpochMilli();
				}
			});
	}

	/**
	 * Key selector that reads a {@code Long} field from the {@code after}
	 * object of a Debezium JSON change record.
	 *
	 * @param fieldName field inside "after", e.g. {@code "id"} or {@code "order_id"}
	 * @return serializable key selector for {@code keyBy}
	 */
	private static KeySelector<String, Long> afterLongField(String fieldName) {
		return new KeySelector<String, Long>() {
			@Override
			public Long getKey(String value) throws Exception {
				return JSON.parseObject(value)
					.getJSONObject("after")
					.getLong(fieldName);
			}
		};
	}

}
