package com.atguigu.flink.chapter01_wordcount;

import com.atguigu.flink.pojo.WordCount;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2022/10/19
 *
 * Streaming word count: processes an unbounded stream continuously — the job
 * runs and computes forever.
 *
 * Data model: Bean
 *   1. must have a no-arg constructor
 *   2. must provide public getters/setters for its private fields
 * Flink: POJO (plain old java object)
 *   - such a type can be serialized by the Flink framework.
 */
public class Demo4_StreamingExecutionPOJO
{
    /**
     * Entry point.
     *
     * @param args optional overrides for the socket source:
     *             args[0] = host (default "hadoop103"),
     *             args[1] = port (default 8888)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // Generalized: host/port may be supplied on the command line; the
        // defaults preserve the previously hard-coded values.
        String host = args.length > 0 ? args[0] : "hadoop103";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // If parallelism is not set, Flink defaults to the number of CPU cores
        // available in the current environment; uncomment to force one thread.
        //env.setParallelism(1);

        // Read from a socket text stream to obtain an unbounded DataStream.
        DataStreamSource<String> source = env.socketTextStream(host, port);

        // Transform the source: word count over WordCount POJOs.
        source
            // Split each input line on spaces and emit one WordCount(word, 1)
            // per token.
            .flatMap(new FlatMapFunction<String, WordCount>()
            {
                @Override
                public void flatMap(String value, Collector<WordCount> out) throws Exception {
                    String[] words = value.split(" ");
                    for (String word : words) {
                        out.collect(new WordCount(word, 1));
                    }
                }
            })
            // Similar to a batch groupBy: keyBy partitions the stream by key —
            // here, the word field of the POJO.
            .keyBy(new KeySelector<WordCount, String>()
            {
                @Override
                public String getKey(WordCount value) throws Exception {
                    return value.getWord();
                }
            })
            // Rolling per-key sum over the POJO's "count" field.
            .sum("count")
            .print();

        // Computation starts only when the environment is executed; with an
        // unbounded source this call never returns.
        env.execute();
    }
}
