package com.atguigu.flink.demo03;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import javax.swing.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import static org.apache.flink.api.common.typeinfo.Types.STRING;

/**
 * Streaming word count over a socket text source, demonstrating Flink's
 * {@code map} and {@code flatMap} operators: each input line is mapped to a
 * {@code List} of (word, 1) tuples, the list is flattened back into individual
 * tuples, and the tuples are keyed by word and summed.
 *
 * <p>Run with optional program arguments {@code <host> <port>}; defaults are
 * {@code hadoop102 9999} (the original hard-coded values).
 *
 * @author admin
 * @date 2021/8/9
 */
public class MyMap {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        // Source host/port default to the original hard-coded values but can be
        // overridden via program arguments, so the demo runs outside the
        // hadoop102 cluster too (e.g. against a local `nc -lk 9999`).
        String host = args.length > 0 ? args[0] : "hadoop102";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 9999;
        DataStreamSource<String> source = env.socketTextStream(host, port);

        // map: line -> List<(word, 1)>. Build the list with a stream collector
        // instead of mutating an ArrayList from forEach (building a result via
        // stream side effects is an anti-pattern). Lambdas erase generic type
        // information, so the produced type must be declared via returns(...).
        SingleOutputStreamOperator<List<Tuple2<String, Integer>>> map = source
                .map((MapFunction<String, List<Tuple2<String, Integer>>>) value ->
                        Arrays.stream(value.split(" "))
                                .map(word -> Tuple2.of(word, 1))
                                .collect(Collectors.toList()))
                .returns(Types.LIST(Types.TUPLE(Types.STRING, Types.INT)));

        // flatMap: List<(word, 1)> -> individual (word, 1) tuples.
        SingleOutputStreamOperator<Tuple2<String, Integer>> pairs = map
                .flatMap((FlatMapFunction<List<Tuple2<String, Integer>>, Tuple2<String, Integer>>)
                        (value, out) -> value.forEach(out::collect))
                .returns(Types.TUPLE(Types.STRING, Types.INT));

        // Key by the word (tuple field f0) and sum the counts (tuple field 1).
        SingleOutputStreamOperator<Tuple2<String, Integer>> sum = pairs
                .keyBy(value -> value.f0)
                .sum(1);

        sum.print("print====>");
        // shuffle() randomly redistributes records across the parallel sink tasks.
        sum.shuffle().print("shuffle====>");

        env.execute();
    }
}
