package cn._51doit.flink.day02.transformations;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demo: partition a socket-fed word stream with {@code keyBy} using a
 * KeySelector lambda (key = the tuple's first field).
 *
 * <p>Feed it words one per line, e.g. via {@code nc -lk 8888}.
 */
public class KeyByDemo2 {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // One word per line from the socket, e.g. "spark", "hadoop".
        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        // Wrap each word as (word, 1). Java lambdas erase the tuple's generic
        // parameters, so the element type must be restated via returns().
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne =
                lines.map(word -> Tuple2.of(word, 1))
                        .returns(Types.TUPLE(Types.STRING, Types.INT));

        // Key the stream by the word itself (field f0) using a KeySelector
        // lambda; this works for any element type, not just tuples.
        KeyedStream<Tuple2<String, Integer>, String> keyedStream =
                wordAndOne.keyBy(tuple -> tuple.f0);

        keyedStream.print();

        // Lazily-built job graph only runs once execute() is called.
        env.execute();
    }
}
