package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Flink 实时数据写入文件
 *      todo 从Kafka消息队列获取数据，解析JSON数据，写入Kafka队列
 * @author xuanyu
 * @date 2025/10/17
 */
public class _03FlinkSideOutputDemo {

	public static void main(String[] args) throws Exception{
		// 1. 执行环境-env
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment() ;
		env.setParallelism(1);

		// 2. 数据源-source  todo 从Kafka Topic队列消费数据
		/*
			先启动Kafka集群
				zk.sh start
				kfk.sh start
			模拟产生日志数据，在node101上执行如下命令：
				启动Flume Agent：
					f1.sh start
				启动模拟数据程序：
					lg.sh start
					执行多次
		 */
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setTopics("topic-log")
			.setGroupId("my-group")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");
		// stream.print("kafka");

		// 3. 数据转换-transformation
		/*
			1). start 启动日志
			2). page 页面日志
			3). error 错误日志
		 */
		// todo 将start、page、error日志分流，使用Flink SideOutput实现，最终数据写入Kafka队列
		/*
			先定义标签tag，然后给数据打上标签，最后依据标签获取数据
			https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/dev/datastream/side_output/
		 */
		OutputTag<String> startTag = new OutputTag<String>("start-log") {};
		OutputTag<String> errorTag = new OutputTag<String>("error-log") {};

		SingleOutputStreamOperator<String> pageStream = stream.process(
			new ProcessFunction<String, String>() {
				@Override
				public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
					// 解析json
					JSONObject jsonObject = JSON.parseObject(value);

					// 判断error
					Object errorValue = jsonObject.get("err");
					if(null != errorValue){
						ctx.output(errorTag, value);
					}

					// 判断start
					Object startValue = jsonObject.get("start");
					if(null != startValue){
						ctx.output(startTag, value);
					}

					// 判断page
					Object pageValue = jsonObject.get("page");
					if(null != pageValue){
						out.collect(value);
					}
				}
			}
		);

		// 获取侧流数据，依据标签获取
		SideOutputDataStream<String> startStream = pageStream.getSideOutput(startTag);
		SideOutputDataStream<String> errorStream = pageStream.getSideOutput(errorTag);

		// 4. 接收器-sink
		saveToKafka(pageStream, "page-log");
		saveToKafka(startStream, "start-log");
		saveToKafka(errorStream, "error-log");

		// 5. 触发执行-execute
		env.execute("FlinkSideOutputDemo") ;
	}


	private static void saveToKafka(DataStream<String> stream, String topic){
		KafkaSink<String> sink = KafkaSink.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setRecordSerializer(KafkaRecordSerializationSchema.builder()
				.setTopic(topic)
				.setValueSerializationSchema(new SimpleStringSchema())
				.build()
			)
			.setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
			.build();
		stream.sinkTo(sink);
	}

}
