package com.atguigu.flink.wordcount;

import com.atguigu.flink.pojo.WordCount;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2023/1/12
 *
 * Data-model note: Flink Tuples support at most 25 fields (Tuple25),
 * and readability degrades as the field count grows.
 * Use a POJO to encapsulate the data instead.
 */
public class Demo4_UnBoundedStreamPOJO
{
    /**
     * Unbounded streaming word count over a socket source, using the
     * {@code WordCount} POJO instead of {@code Tuple2} for readability.
     *
     * @param args optional overrides: {@code args[0]} = source host
     *             (default {@code "hadoop103"}), {@code args[1]} = source
     *             port (default {@code 8888})
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // Allow host/port to be supplied on the command line; the defaults
        // preserve the original hard-coded behavior (hadoop103:8888).
        String host = args.length > 0 ? args[0] : "hadoop103";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 so the printed results arrive as a single ordered stream.
        env.setParallelism(1);

        DataStreamSource<String> ds = env.socketTextStream(host, port);

        SingleOutputStreamOperator<WordCount> ds1 = ds.flatMap(new FlatMapFunction<String, WordCount>()
        {
            @Override
            public void flatMap(String line, Collector<WordCount> out) throws Exception {
                // \\s+ tolerates runs of spaces/tabs; skip empty tokens so
                // blank or leading-whitespace lines do not emit ("", 1) records.
                String[] words = line.split("\\s+");
                for (String word : words) {
                    if (!word.isEmpty()) {
                        out.collect(new WordCount(word, 1));
                    }
                }
            }
        });

        // Key by the word; the "count" string must match the POJO field name
        // for the field-expression sum() aggregation to resolve.
        ds1.keyBy(new KeySelector<WordCount, String>()
        {
            @Override
            public String getKey(WordCount data) throws Exception {
                return data.getWord();
            }
        })
          .sum("count")
          .print();

        // Lazily-built pipeline only runs once execute() is called.
        env.execute();
    }
}
