package com.atguigu.flink.wordcount;

import com.atguigu.flink.pojo.WordCount;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2023/3/31
 *
 *  When emitting data, Flink's built-in Tuple types can be used to wrap records.
 *      Tuples go up to Tuple25 at most, meaning at most 25 fields can be wrapped.
 *
 *      So if there are more fields than that, tuples cannot be used!
 *
 *      Tuples are also inconvenient and hurt program readability
 *      (fields are accessed positionally, e.g. f0/f1, instead of by name).
 *
 *
 *      Solution: use a POJO instead!
 *              POJO (Plain Old Java Object)
 *                  To some degree, POJO = JavaBean
 *                Flink's POJO requirements:
 *                (1) the class must be declared public
 *                (2) it must have a public no-arg constructor
 *                (3) fields must be private with public getters/setters
 *                (4) every field type must be serializable by Java/Flink
 *
 *
 */
public class Demo6_POJODemo
{
    public static void main(String[] args) throws Exception {

        // Pin the local web UI to port 3333 so the demo's dashboard address is stable.
        Configuration configuration = new Configuration();
        configuration.setInteger("rest.port", 3333);

        // Local environment with parallelism 1, so the printed output keeps a single,
        // deterministic ordering per key.
        StreamExecutionEnvironment env =
            StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.setParallelism(1);

        // Unbounded text source: one record per line read from the socket.
        DataStreamSource<String> lines = env.socketTextStream("hadoop102", 8888);

        // Tokenize each line on single spaces and emit one WordCount(word, 1) per token.
        SingleOutputStreamOperator<WordCount> wordCounts =
            lines.flatMap(new FlatMapFunction<String, WordCount>()
            {
                @Override
                public void flatMap(String line, Collector<WordCount> out) throws Exception {
                    for (String word : line.split(" ")) {
                        out.collect(new WordCount(word, 1));
                    }
                }
            });

        // Partition by the word itself, then aggregate.
        wordCounts
            .keyBy(new KeySelector<WordCount, String>()
            {
                @Override
                public String getKey(WordCount wc) throws Exception {
                    return wc.getWord();
                }
            })
            // The stream element type is a POJO, so sum() takes the field name to aggregate.
            .sum("count")
            .print();

        // Submit the job; blocks until the (unbounded) job is cancelled.
        env.execute();

    }
}
