package cn.doitedu.flink.suanzi;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.operators.UnsortedGrouping;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.util.Collector;

import java.lang.reflect.Array;
import java.util.Arrays;

public class Demo1 {
    /**
     * Flink DataSet API demo covering three patterns:
     * (1) map over (word, count) tuples, (2) groupBy + maxBy aggregation,
     * (3) a classic flatMap word count.
     *
     * <p>Note: in the DataSet API, {@code print()} triggers plan execution,
     * so no explicit {@code env.execute()} is required here.
     */
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Source 1: (key, value) tuples with duplicate keys for the grouping demo.
        DataSource<Tuple2<String, Integer>> ds = env.fromCollection(Arrays.asList(
                Tuple2.of("a", 1), Tuple2.of("b", 1), Tuple2.of("a", 2), Tuple2.of("c", 3)));

        // Upper-case the key. Return a fresh tuple instead of mutating the input:
        // mutating incoming records is unsafe when Flink's object-reuse mode is on.
        MapOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> map =
                ds.map(new MapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(Tuple2<String, Integer> tp) throws Exception {
                        return Tuple2.of(tp.f0.toUpperCase(), tp.f1);
                    }
                });

        map.print();

        // For each key (field 0), keep the tuple with the largest value (field 1).
        UnsortedGrouping<Tuple2<String, Integer>> groupBy = map.groupBy(0);
        groupBy.maxBy(1).print();

        // Source 2: space-separated word lines for the word count.
        DataSource<String> ds2 = env.fromCollection(Arrays.asList(
                "a a a a b b b b c c c", "c b c c b b d d f f a"));
        FlatMapOperator<String, Tuple2<String, Integer>> flatmap =
                ds2.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Emit (word, 1) for every token in the line.
                        for (String s : value.split(" ")) {
                            out.collect(Tuple2.of(s, 1));
                        }
                    }
                });

        // FIX: the flatMap pipeline was built but never executed (dead code).
        // Complete the word count: group by word, sum the 1s, and print.
        flatmap.groupBy(0).sum(1).print();
    }
}
