package com.learn.flink.learn.transformation;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.ReduceOperator;
import org.apache.flink.api.java.operators.UnsortedGrouping;
import org.apache.flink.api.java.tuple.Tuple2;

import java.util.ArrayList;
import java.util.List;

public class DataSetTransformation {

    /**
     * Demonstrates a Flink DataSet groupBy/aggregate transformation: builds a small
     * collection of (word, count) tuples, groups them by the word (tuple field 0),
     * and sums the counts (tuple field 1), printing the aggregated result.
     *
     * <p>Expected output contains one tuple per distinct word, e.g. {@code (java,2)}
     * and {@code (scale,2)} for the sample data below.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Sample input: (word, count) pairs; "java" appears twice so its counts merge.
        List<Tuple2<String, Integer>> listData = new ArrayList<>();
        listData.add(new Tuple2<>("java", 1));
        listData.add(new Tuple2<>("scale", 2));
        listData.add(new Tuple2<>("java", 1));
        DataSource<Tuple2<String, Integer>> data = env.fromCollection(listData);

        // Group by the word (field 0), then sum the counts (field 1).
        UnsortedGrouping<Tuple2<String, Integer>> grouped = data.groupBy(0);
        AggregateOperator<Tuple2<String, Integer>> sum = grouped.sum(1);

        // NOTE: the same result could be produced with
        // grouped.reduce((t1, t2) -> new Tuple2<>(t1.f0, t1.f1 + t2.f1)).

        // print() triggers job execution (acts as a sink), so no env.execute() is needed.
        sum.print();
    }
}
