package ex.other;

import ex.datastream.functions.function.FlatMapFuncBySplitter01;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

/**
 * Batch word-count example built on the Flink DataSet API.
 *
 * <p>Creates a small in-memory data set of text lines, flat-maps each line through
 * {@code FlatMapFuncBySplitter01} into {@code Tuple2<String, Integer>} elements
 * (presumably (word, count) pairs — the splitter's body is defined elsewhere),
 * groups on the word field and sums the counts, then prints the result to stderr.
 */
public class WordCount {

    public static void main(String[] args) throws Exception {
        // Batch execution environment — local or cluster depending on how the job is launched.
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Sample input: two lines of space-separated words.
        final DataSet<String> lines = env.fromElements(
                "Flink Spark Storm",
                "Hadoop Spark Flink");

        // Group by tuple field 0 (the word) and sum field 1 (the count).
        final DataSet<Tuple2<String, Integer>> wordCounts = lines
                .flatMap(new FlatMapFuncBySplitter01())
                .groupBy(0)
                .sum(1);

        // printToErr() is a terminal sink in the DataSet API: it triggers job
        // execution and writes the results to standard error.
        wordCounts.printToErr();
    }
}
