package com.atguigu.flink.wordcount;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2022/12/10
 *
 *      无界流：  有始无终
 *      模拟无界流：    sudo yum -y install nc
 *          绑定服务端: nc -lk 主机名 端口
 *          连接服务端: nc 主机名 端口 (了解)，使用flink去连接
 */
public class Demo4_InterfaceImpl
{
    /**
     * Entry point. Reads text lines from a socket, splits each line into words,
     * and prints a running count per word.
     *
     * <p>Host and port are now configurable (previously hard-coded) while the
     * defaults preserve the original behavior of connecting to hadoop103:8888.
     *
     * @param args optional: args[0] = socket host, args[1] = socket port
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // Backward-compatible defaults: original code connected to hadoop103:8888.
        String host = args.length > 0 ? args[0] : "hadoop103";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        // 1. Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Run the whole pipeline in a single task (global parallelism of 1).
        env.setParallelism(1);

        env.socketTextStream(host, port)
           // Splitter implemented as a named (static nested) class below.
           .flatMap(new MyFlatMapFunction())
           // Key each (word, count) tuple by the word itself (field f0).
           .keyBy(new KeySelector<Tuple2<String, Integer>, String>()
                  {
                      @Override
                      public String getKey(Tuple2<String, Integer> ele) throws Exception {
                          return ele.f0;
                      }
                  }
                  )
           // Running sum over the count field (position 1, i.e. f1).
           .sum(1)
           .print();

        // Trigger execution; blocks until the (unbounded) job is cancelled.
        env.execute();

    }

    /**
     * Splits an input line into words and emits one (word, 1) tuple per word.
     *
     * <p>Fix: splitting on the regex {@code \s+} instead of a single space, and
     * skipping empty tokens, prevents empty-string "words" from being counted
     * when the input contains leading or consecutive whitespace.
     */
    private static class MyFlatMapFunction implements FlatMapFunction<String, Tuple2<String,Integer>>
    {
        @Override
        public void flatMap(String inputLine, Collector<Tuple2<String, Integer>> collector) throws Exception {
            // \s+ collapses runs of whitespace into one delimiter.
            String[] words = inputLine.split("\\s+");

            for (String word : words) {
                // split() can still yield one empty token for leading whitespace.
                if (!word.isEmpty()) {
                    collector.collect(Tuple2.of(word, 1));
                }
            }
        }
    }
}
