package com.intct.flink;

import com.intct.hbase.bean.Student;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author gufg
 * @since 2025-06-25 15:52
 */
/**
 * Demo of the keyed reduce transformation: students are grouped by class id
 * and their scores are rolled up into a running per-class total.
 */
public class AggregationReduceDemo {
    public static void main(String[] args) throws Exception {
        // Set up the streaming environment; a single parallel task keeps the
        // printed output in a deterministic order for the demo.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Bounded in-memory source of (classId, studentId, score) records.
        DataStreamSource<Student> students = env.fromElements(
                new Student("c_01", "s_001", 3),
                new Student("c_02", "s_002", 1),
                new Student("c_03", "s_003", 1),
                new Student("c_01", "s_004", 1),
                new Student("c_01", "s_005", 2)
        );

        // Partition the stream by class id, then fold each partition with a
        // running reduce.
        //
        // NOTE: for the first record of a key, reduce() emits that record
        // as-is without invoking the user function; the function only runs
        // once a second record for the same key arrives.
        SingleOutputStreamOperator<Student> classTotals = students
                .keyBy(new KeySelector<Student, String>() {
                    @Override
                    public String getKey(Student student) throws Exception {
                        return student.getClassId();
                    }
                })
                .reduce(new ReduceFunction<Student>() {
                    @Override
                    public Student reduce(Student acc, Student next) throws Exception {
                        // Accumulate the score; the student id is dropped
                        // (null) because the aggregate no longer describes a
                        // single student.
                        long runningScore = acc.getScore() + next.getScore();
                        return new Student(next.getClassId(), null, runningScore);
                    }
                });

        classTotals.print();

        // Submit and run the job.
        env.execute();
    }
}
