package net.bwie.flink;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Flink streaming job: keyed running sum of order amounts read from a socket.
 * @author xuanyu
 * @date 2025/10/15
 */
public class FlinkStreamOrder {

	/**
	 * Entry point of the streaming job. A Flink streaming program has 5 parts:
	 * 1 - execution environment (env)
	 * 2 - data source (source)
	 * 3 - data transformation (transformation)
	 * 4 - data sink (sink)
	 * 5 - trigger execution (execute)
	 *
	 * Input lines arrive over a socket as CSV records, e.g. "u1,bj,10"
	 * (fields: userId, city, amount). The job keys on field[1] and keeps a
	 * running sum of field[2] per key, printing each update to stdout.
	 *
	 * @param args unused command-line arguments
	 * @throws Exception if job submission or execution fails
	 */
	public static void main(String[] args) throws Exception {
		// 1 - execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		// Single parallel instance so printed output is easy to follow.
		env.setParallelism(1);

		// 2 - source: one CSV record per line from the socket
		DataStreamSource<String> stream = env.socketTextStream("node101", 9999);

		// 3 - transformation
		// 3-2. Parse each line into (key, amount). Use flatMap instead of map so
		// malformed records (too few fields, non-numeric amount) can be dropped
		// instead of throwing and failing the whole job.
		SingleOutputStreamOperator<Tuple2<String, Integer>> stream2 = stream.flatMap(
			new FlatMapFunction<String, Tuple2<String, Integer>>() {
				@Override
				public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
					// value -> u1,bj,10
					String[] split = value.split(",");
					if (split.length < 3) {
						return; // malformed record: not enough fields, drop it
					}
					try {
						// key = field[1], amount = field[2]
						out.collect(Tuple2.of(split[1], Integer.valueOf(split[2])));
					} catch (NumberFormatException ignored) {
						// amount is not an integer: drop the record rather than kill the job
					}
				}
			}
		);

		// 3-3. group by the key stored in tuple field f0
		KeyedStream<Tuple2<String, Integer>, String> stream3 = stream2.keyBy(
			new KeySelector<Tuple2<String, Integer>, String>() {
				@Override
				public String getKey(Tuple2<String, Integer> value) throws Exception {
					return value.f0;
				}
			}
		);

		// 3-4. running sum over tuple field index 1 (the amount) per key
		SingleOutputStreamOperator<Tuple2<String, Integer>> stream4 = stream3.sum(1);

		// 4 - sink: print every updated (key, runningSum) pair to stdout
		stream4.print();

		// 5 - trigger execution (streaming jobs do nothing until execute is called)
		env.execute("FlinkStreamOrder");
	}

}
