package com.atguigu.flink.wordcount;

import com.atguigu.flink.pojo.WordCount;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2022/12/10
 *
 * Streaming WordCount implemented with a POJO ({@link WordCount}) instead of a
 * {@link Tuple2}.
 *
 * Drawback:  more boilerplate — you must write the POJO yourself.
 *
 * Advantages over {@code Tuple2}:
 *   - Fields carry meaningful names. Tuple2(f0, f1) can only be addressed by
 *     position; a POJO exposes a named property per field.
 *   - Tuples are limited in arity: Tuple2 holds exactly two fields, and the
 *     largest tuple type Flink provides is Tuple25. A POJO has no such limit.
 */
public class Demo5_POJOImpl
{
    /** Socket source host used when no CLI argument is supplied. */
    private static final String DEFAULT_HOST = "hadoop103";
    /** Socket source port used when no CLI argument is supplied. */
    private static final int DEFAULT_PORT = 8888;

    public static void main(String[] args) throws Exception {

        // Optionally override the socket source location:
        //   args[0] = host, args[1] = port. Defaults preserve the old behavior.
        String host = args.length > 0 ? args[0] : DEFAULT_HOST;
        int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;

        // 1. Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Run the whole job as a single task (deterministic, ordered output).
        env.setParallelism(1);

        env.socketTextStream(host, port)
           // Nested-class implementation of the line splitter.
           .flatMap(new MyFlatMapFunction())
           // POJO advantage: key by a named getter via a method reference
           // (KeySelector is a SAM interface, so this is equivalent to the
           // anonymous-class form).
           .keyBy(WordCount::getWord)
           // Rolling sum over the POJO field "count", addressed by name.
           .sum("count")
           .print();

        // 5. Trigger execution — the pipeline above is lazy until execute().
        env.execute();

    }

    /** Splits each input line on spaces and emits one WordCount(word, 1) per token. */
    private static class MyFlatMapFunction implements FlatMapFunction<String, WordCount>
    {
        @Override
        public void flatMap(String inputLine, Collector<WordCount> collector) throws Exception {
            for (String word : inputLine.split(" ")) {
                collector.collect(new WordCount(word, 1));
            }
        }
    }
}
