package com.atguigu.wordcount;

import com.atguigu.pojo.WordCount;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class Flink06_POJOWordCount {

    /**
     * Streaming word count that emits {@link WordCount} POJOs instead of
     * {@code Tuple2}, so the aggregated field can be referenced by name in
     * {@code sum("count")}.
     *
     * <p>Pipeline: read lines from socket {@code hadoop102:9999}, split each
     * line on single spaces, key by word, keep a running per-word count, and
     * print every update to stdout. Runs until the job is cancelled.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 keeps the printed output in a single, readable stream
        // (no subtask prefixes interleaving) for this demo.
        env.setParallelism(1);

        DataStreamSource<String> ds = env.socketTextStream("hadoop102", 9999);

        // Lambda form of the FlatMapFunction. Java erases the Collector's
        // generic parameter, so Flink cannot infer the output type from the
        // lambda alone; declare it explicitly with returns(Types.POJO(...)).
        SingleOutputStreamOperator<WordCount> flatMapDs = ds
                .flatMap((String line, Collector<WordCount> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(new WordCount(word, 1L));
                    }
                })
                .returns(Types.POJO(WordCount.class));

        // Key by the word itself; the key type (String) is inferable from the
        // method reference's return type, so no returns(...) hint is needed.
        KeyedStream<WordCount, String> keyedDs = flatMapDs.keyBy(WordCount::getWord);

        // Field-expression aggregation: requires WordCount to be a valid Flink
        // POJO (public no-arg constructor plus public fields or getters/setters)
        // with a field named "count".
        SingleOutputStreamOperator<WordCount> sumDs = keyedDs.sum("count");

        sumDs.print();

        try {
            env.execute();
        } catch (Exception e) {
            // execute() declares a checked Exception; rethrow unchecked with the
            // cause preserved so main's signature stays clean.
            throw new RuntimeException(e);
        }
    }
}

