package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import lombok.SneakyThrows;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/**
 * Joins the {@code order_info} and {@code order_detail} CDC change streams read
 * from Kafka using a Flink Interval Join and writes the merged order records
 * back to Kafka (topic {@code dwd-order}).
 *
 * @author xuanyu
 * @date 2025/10/28
 */
public class _05GmallJoinJob {

	/**
	 * Parser for the {@code after.create_time} field ("yyyy-MM-dd HH:mm:ss").
	 * {@link DateTimeFormatter} is immutable and thread-safe (unlike the
	 * previously used SimpleDateFormat), so one shared constant is safe for
	 * all subtasks; as a static field it is not captured by serialized closures.
	 */
	private static final DateTimeFormatter CREATE_TIME_FORMAT =
		DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

	public static void main(String[] args) throws Exception {
		// 1. Execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 2. Source: JSON change events from Kafka
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setTopics("db-topic")
			.setGroupId("gmall-payment-g1")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(
			source, WatermarkStrategy.noWatermarks(), "Kafka Source"
		);

		// 3-1. Split the mixed stream by table name:
		//      order_info -> main output, order_detail -> side output.
		OutputTag<String> outputTag = new OutputTag<String>("order-detail") {};
		SingleOutputStreamOperator<String> infoStream = stream.process(
			new ProcessFunction<String, String>() {
				@Override
				public void processElement(String value,
				                           Context ctx,
				                           Collector<String> out) throws Exception {
					// The table name lives in the "source" section of the change event.
					String table = JSON.parseObject(value).getJSONObject("source").getString("table");
					// The two table names are mutually exclusive, so route with if/else.
					if ("order_info".equals(table)) {
						out.collect(value);
					} else if ("order_detail".equals(table)) {
						ctx.output(outputTag, value);
					}
					// Events from any other table are silently dropped (original behavior).
				}
			}
		);

		// 3-2. order_detail stream obtained from the side output
		SideOutputDataStream<String> detailStream = infoStream.getSideOutput(outputTag);

		// 3-3. Assign event-time timestamps and watermarks on both streams
		//      via one shared strategy (was duplicated anonymous classes).
		SingleOutputStreamOperator<String> timeInfoStream =
			infoStream.assignTimestampsAndWatermarks(createTimeWatermarks());
		SingleOutputStreamOperator<String> timeDetailStream =
			detailStream.assignTimestampsAndWatermarks(createTimeWatermarks());

		// 3-4. Key both streams by the join key: order_info.id == order_detail.order_id
		KeyedStream<String, String> keyedInfoStream = timeInfoStream.keyBy(
			json -> JSON.parseObject(json).getJSONObject("after").getString("id")
		);
		KeyedStream<String, String> keyedDetailStream = timeDetailStream.keyBy(
			json -> JSON.parseObject(json).getJSONObject("after").getString("order_id")
		);

		// 3-5. Interval join: detail events within [-1s, +2s] of the matching info event
		KeyedStream.IntervalJoined<String, String, String> joinStream = keyedInfoStream
			.intervalJoin(keyedDetailStream)
			.between(Time.seconds(-1), Time.seconds(2));

		// 3-6. Merge each matched pair into one JSON record: {"info": ..., "detail": ...}
		SingleOutputStreamOperator<String> resultStream = joinStream.process(
			new ProcessJoinFunction<String, String, String>() {
				@Override
				public void processElement(String left,
				                           String right,
				                           Context ctx,
				                           Collector<String> out) throws Exception {
					// left = order_info event, right = order_detail event
					JSONObject jsonObject = new JSONObject();
					jsonObject.put("info", JSON.parseObject(left).getJSONObject("after"));
					jsonObject.put("detail", JSON.parseObject(right).getJSONObject("after"));
					out.collect(jsonObject.toJSONString());
				}
			}
		);
//		resultStream.print("join");

		// 4. Sink: joined records to Kafka
		KafkaSink<String> sink = KafkaSink.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setRecordSerializer(KafkaRecordSerializationSchema.builder()
				.setTopic("dwd-order")
				.setValueSerializationSchema(new SimpleStringSchema())
				.build()
			)
			.setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
			.build();
		resultStream.sinkTo(sink);

		// 5. Trigger execution
		env.execute("GmallJoinJob");
	}

	/**
	 * Builds the watermark strategy shared by both streams: zero allowed
	 * out-of-orderness, event time taken from {@code after.create_time}.
	 * Uses java.time instead of the non-thread-safe SimpleDateFormat, which
	 * also removes the Lombok {@code @SneakyThrows} workaround for the
	 * checked ParseException.
	 *
	 * @return a {@link WatermarkStrategy} extracting epoch-millis timestamps
	 */
	private static WatermarkStrategy<String> createTimeWatermarks() {
		return WatermarkStrategy
			.<String>forBoundedOutOfOrderness(Duration.ofSeconds(0))
			.withTimestampAssigner((SerializableTimestampAssigner<String>) (element, recordTimestamp) -> {
				// NOTE(review): assumes "after" and "create_time" are always present
				// (insert/update events); a delete event with a null "after" would
				// NPE here, exactly as the original code did.
				String createTime = JSON.parseObject(element)
					.getJSONObject("after")
					.getString("create_time");
				// Interpret in the system default zone, matching the old
				// SimpleDateFormat#parse behavior.
				return LocalDateTime.parse(createTime, CREATE_TIME_FORMAT)
					.atZone(ZoneId.systemDefault())
					.toInstant()
					.toEpochMilli();
			});
	}

}








