package cn._51doit.flink.day02.transformations;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink keyBy demo: reads words from a socket, wraps each word in a
 * {@code WordCount} POJO, and partitions the stream by the word field.
 *
 * <p>Uses the {@link org.apache.flink.api.java.functions.KeySelector}-based
 * {@code keyBy} (via a method reference). The older field-name overload
 * {@code keyBy("word")} is deprecated and was removed in newer Flink versions.
 *
 * <p>Run {@code nc -lk 8888} first, then start this job and type words.
 */
public class KeyByDemo3 {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: one word per line from a local socket, e.g.
        //   spark
        //   hadoop
        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        // Wrap each word with an initial count of 1, e.g. (spark, 1), (hadoop, 1).
        // The lambda returns a concrete POJO type, so no explicit returns(...) hint is needed.
        SingleOutputStreamOperator<WordCount> wcStream = lines.map(w -> new WordCount(w, 1));

        // Partition by the word via a KeySelector (method reference).
        // This replaces the deprecated keyBy("word") field-name overload and
        // gives a typed key (String) instead of the untyped Tuple key.
        KeyedStream<WordCount, String> keyed = wcStream.keyBy(WordCount::getWord);

        keyed.print();

        // Lazily-built pipeline only runs once execute() is called.
        env.execute();
    }

}
