package com.atguigu.day01;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * @author Felix
 * @date 2024/3/29
 * Demonstrates word counting over an unbounded socket stream, highlighting the
 * generics (type-erasure) problem: when the flatMap transformation is supplied
 * as a lambda, Java erases the {@code Tuple2<String, Long>} output type at
 * compile time, so Flink cannot infer it and the type must be declared
 * explicitly via {@code returns(...)}. (An anonymous
 * {@code FlatMapFunction<String, Tuple2<String, Long>>} subclass would retain
 * the generic type information and need no such declaration.)
 */
public class Flink04_WC_Stream_UnBound_Generic {
    public static void main(String[] args) throws Exception {
        //TODO 1. Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env
                // Unbounded source: one record per line of text received on the socket
                .socketTextStream("hadoop102", 8888)
                // Split each line on spaces and emit a (word, 1L) pair per word.
                // Because this is a lambda, the generic output type is erased.
                .flatMap(
                        (String lineStr, Collector<Tuple2<String, Long>> out) -> {
                            String[] words = lineStr.split(" ");
                            for (String word : words) {
                                out.collect(Tuple2.of(word, 1L));
                            }
                        }
                )
                // Explicitly declare the produced type to work around type erasure
                .returns(Types.TUPLE(Types.STRING, Types.LONG))
                // Key by the word; keyBy(int) is deprecated, so use a KeySelector lambda
                .keyBy(value -> value.f0)
                // Running sum of the counts (tuple field 1) per key
                .sum(1)
                .print();
        //TODO 2. Submit the job
        env.execute();
    }
}
