package net.bwie.dt.data;

import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.util.Collector;

/**
 * Flink streaming demo: a periodic in-memory source, a word-splitting
 * flatMap, and a print sink.
 * @author xuanyu
 * @date 2025/9/3
 */
public class FlinkDemo {

	/**
	 * Builds and runs a simple streaming word-count pipeline: an in-memory
	 * source emits one fixed sentence per second, a flatMap splits each
	 * sentence into words, and every word is printed to stdout.
	 *
	 * @param args command-line arguments (unused)
	 * @throws Exception if job construction or execution fails
	 */
	public static void main(String[] args) throws Exception {
		// 1. Execution environment: default operator parallelism 1,
		//    checkpoint every 5 seconds.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);
		env.enableCheckpointing(5000L);

		// 2. Unbounded test source: emits the same sentence once per second
		//    until the job is cancelled.
		DataStreamSource<String> stream = env.addSource(new RichSourceFunction<String>() {

			// volatile: cancel() is invoked from a different thread than run()
			private volatile boolean isRunning = true;

			@Override
			public void run(SourceContext<String> ctx) throws Exception {
				while (isRunning) {
					// Checkpointing is enabled, so element emission must be
					// atomic with respect to state snapshots: emit while
					// holding the checkpoint lock (SourceFunction contract).
					synchronized (ctx.getCheckpointLock()) {
						ctx.collect("flink sql kafka flink flink sql");
					}
					try {
						Thread.sleep(1000L);
					} catch (InterruptedException e) {
						// Flink interrupts the source thread on cancellation:
						// restore the interrupt flag and stop emitting.
						Thread.currentThread().interrupt();
						break;
					}
				}
			}

			@Override
			public void cancel() {
				isRunning = false;
			}
		});

		// 3. Split each sentence on whitespace into individual words;
		//    parallelism 2 overrides the environment default for this operator.
		SingleOutputStreamOperator<String> result = stream.flatMap(new RichFlatMapFunction<String, String>() {
			@Override
			public void flatMap(String value, Collector<String> out) throws Exception {
				for (String word : value.trim().split("\\s+")) {
					out.collect(word);
				}
			}
		}).setParallelism(2);

		// 4. Print every word to stdout with the prefix "word".
		result.print("word").setParallelism(1);

		// 5. Submit the job; blocks until the streaming job terminates.
		env.execute("FlinkDemo");
	}

}
