package flink.demo;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class StreamBatchWordCount {

    /** Fallback input file used when no path is passed on the command line. */
    private static final String DEFAULT_INPUT =
            "D:\\BaiduNetdiskDownload\\尚硅谷大数据技术之2022版Flink1.13（Java版）\\代码\\FlinkTutorial\\data\\wc\\input\\wc.txt";

    /**
     * Batch-mode word count over a text file using the DataStream API.
     *
     * <p>Reads each line, upper-cases it, splits it on whitespace, emits
     * {@code (word, 1)} tuples, groups them by word, and prints the summed
     * count per word.
     *
     * @param args optional; {@code args[0]} overrides the default input path
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Allow the input file to be supplied on the command line instead of
        // relying only on the hard-coded developer path.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // BATCH mode: the source is bounded, so each key's result is emitted
        // once at the end of the job rather than as a running update stream.
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);

        DataStreamSource<String> lines = env.readTextFile(inputPath);

        SingleOutputStreamOperator<String> upperCased = lines.map(String::toUpperCase);

        // Split on runs of whitespace ("\\s+" instead of "\\s") so consecutive
        // spaces do not yield empty tokens; skip blanks that can still arise
        // from leading whitespace, otherwise "" would be counted as a word.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne = upperCased
                .flatMap((String line, Collector<Tuple2<String, Integer>> out) -> {
                    for (String word : line.split("\\s+")) {
                        if (!word.isEmpty()) {
                            out.collect(Tuple2.of(word, 1));
                        }
                    }
                })
                // Lambdas erase generic type info; declare it explicitly so
                // Flink can serialize the Tuple2<String, Integer> stream.
                .returns(Types.TUPLE(Types.STRING, Types.INT));

        KeyedStream<Tuple2<String, Integer>, String> keyedByWord =
                wordAndOne.keyBy(value -> value.f0);

        // Sum the count field (tuple position 1) per word and print results.
        keyedByWord.sum(1).print();

        // Let execution failures propagate instead of swallowing them with
        // printStackTrace(), so the JVM exits non-zero on a failed job.
        env.execute("StreamBatchWordCount");
    }

}
