package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Flink real-time data sink demo.
 *      Consumes raw log records from a Kafka topic, parses each JSON payload,
 *      keeps only records that contain a "page" field, and writes the result
 *      to both a text file and another Kafka topic.
 * @author xuanyu
 * @date 2025/10/17
 */
public class _01FlinkSinkDemo {

	public static void main(String[] args) throws Exception {
		// 1. Execution environment. Parallelism 1 keeps the demo output
		// readable and produces a single output file.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 2. Source: consume raw log records from the Kafka topic "topic-log".
		/*
			Start the Kafka cluster first:
				zk.sh start
				kfk.sh start
			Generate mock log data; on node101 run:
				start the Flume agent:
					f1.sh start
				start the mock-data generator (run it several times):
					lg.sh start
		 */
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setTopics("topic-log")
			.setGroupId("my-group")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");
		// stream.print("kafka");

		// 3. Transformation: keep only records whose JSON payload has a "page" field.
		SingleOutputStreamOperator<String> stream3 = stream.process(
			new ProcessFunction<String, String>() {
				@Override
				public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
					// Guard against malformed input: previously an invalid JSON
					// record made JSON.parseObject() throw, failing the task and
					// restarting the whole streaming job. A single bad log line
					// must not be fatal.
					try {
						JSONObject jsonObject = JSON.parseObject(value);
						// parseObject returns null for null/empty input.
						if (jsonObject != null && jsonObject.get("page") != null) {
							out.collect(value);
						}
					} catch (JSONException ignored) {
						// Drop un-parseable records. In production, prefer
						// routing them to a side output / dead-letter topic
						// instead of silently discarding.
					}
				}
			}
		);
		// stream3.print("page");

		// 4. Sinks
		// 4-1. Write the filtered records to the Kafka topic "page-log".
		KafkaSink<String> sink = KafkaSink.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setRecordSerializer(KafkaRecordSerializationSchema.builder()
				.setTopic("page-log")
				.setValueSerializationSchema(new SimpleStringSchema())
				.build()
			)
			// AT_LEAST_ONCE may produce duplicates on failure recovery;
			// downstream consumers should be idempotent.
			.setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
			.build();
		stream3.sinkTo(sink);

		// 4-2. Write the filtered records to a local text file.
		// NOTE(review): writeAsText() is deprecated in recent Flink versions;
		// migrate to FileSink once flink-connector-files is on the classpath.
		stream3.writeAsText("datas/page-log.txt", FileSystem.WriteMode.OVERWRITE);

		// 5. Trigger execution (streaming jobs run until cancelled).
		env.execute("FlinkSinkDemo");
	}

}
