package com.shujia.flink.core;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.ArrayList;

public class Demo1WordCount {

    /**
     * Streaming word count over an unbounded socket source.
     *
     * <p>Reads comma-separated lines from {@code master:8888} (start the source with
     * {@code nc -lk 8888}), splits them into words, and continuously prints a running
     * count per word.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the Flink streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Run all operators of this job with a parallelism of 2.
        env.setParallelism(2);

        /*
         * Network buffers are normally flushed downstream when either
         *   1. the buffer timeout elapses (default 200 ms), or
         *   2. the buffer fills up (32 KB).
         * A timeout of 0 flushes after every record: lowest latency, lower throughput.
         */
        env.setBufferTimeout(0);

        // 2. Unbounded source: one record per line received on the socket.
        DataStream<String> lineStream = env.socketTextStream("master", 8888);

        // Split each comma-separated line into individual words (one record each).
        // The explicit Types.STRING hint is required: lambda generics are erased,
        // so Flink cannot infer the Collector's element type on its own.
        DataStream<String> wordStream = lineStream.flatMap(
                (String line, Collector<String> collector) -> {
                    for (String token : line.split(",")) {
                        collector.collect(token);
                    }
                },
                Types.STRING);

        // Pair every word with an initial count of 1, again with an explicit type hint.
        DataStream<Tuple2<String, Integer>> pairStream = wordStream.map(
                word -> Tuple2.of(word, 1),
                Types.TUPLE(Types.STRING, Types.INT));

        // Key by the word (tuple field 0) and keep a running sum of field 1.
        DataStream<Tuple2<String, Integer>> countStream = pairStream
                .keyBy(pair -> pair.f0)
                .sum(1);

        // Sink: print each updated (word, count) pair to stdout.
        countStream.print();

        // Submit and run the job (blocks until the job terminates).
        env.execute();
    }
}
