package org.flink.hello;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.LocalEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

import javax.lang.model.element.ExecutableElement;

/**
 * @Author: TongRui乀
 * @Date: 2020/10/31 16:20
 * @description: Flink getting-started example — counts word occurrences using the DataSet (batch) API.
 */
public class DateSetWordCount {

    /**
     * Builds an in-memory DataSet of sentences, tokenizes every line into
     * (word, 1) pairs via {@link LineSplitter}, sums the counts per word,
     * and prints the aggregated result to stderr.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Create a local (in-process) batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();

        // 2. Build the source DataSet from literal lines.
        DataSource<String> lines = env.fromElements(
                "Flink Spark Storm",
                "Flink Flink Flink",
                "Spark Spark Spark",
                "Storm Storm Storm");

        // 3. Tokenize to (word, 1) tuples, group by the word (tuple field 0)
        //    and sum the occurrence counts (tuple field 1).
        AggregateOperator<Tuple2<String, Integer>> counts =
                lines.flatMap(new LineSplitter())
                        .groupBy(0)
                        .sum(1);

        // 4. Trigger execution; printToErr() writes each tuple to stderr.
        counts.printToErr();
    }
}

/**
 * Tokenizer used by the word-count job: splits each input line on runs of
 * non-word characters and emits a (word, 1) tuple for every non-empty token.
 */
class LineSplitter implements FlatMapFunction<String, Tuple2<String, Integer>> {

    /**
     * Splits one source line into words and forwards each as a count-of-one pair.
     *
     * @param line one element of the source DataSet
     * @param out  collector receiving a (word, 1) tuple per token
     * @throws Exception declared by the interface; not thrown here
     */
    @Override
    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
        // "\\W+" collapses consecutive separators; a line starting with a
        // separator still yields one leading empty token, which the
        // isEmpty() guard below discards.
        for (String token : line.split("\\W+")) {
            if (!token.isEmpty()) {
                out.collect(new Tuple2<>(token, 1));
            }
        }
    }
}
