package com.pw.study.flink.exercise;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Arrays;


/**
 * Flink word-count exercise comparing three execution styles: batch mode over a file,
 * a bounded stream over the same file, and an unbounded stream over a socket source.
 *
 * @author linux_future
 * @since 2022/3/12
 */
public class WordCount {

    /**
     * Runs the three word-count demos in order: batch, bounded stream, then the
     * unbounded socket stream. Note that {@link #readFile3()} blocks until its
     * socket source terminates, so it must run last.
     */
    public static void main(String[] args) {
        readFile();
        readFile2();
        readFile3();
    }

    /**
     * Unbounded-stream word count: consumes lines from a socket and prints a
     * running count per word.
     * <p>
     * NOTE(review): host {@code "hadoop111"} and port {@code 999} are hard-coded;
     * the job fails at runtime if no server is listening there — confirm against
     * the exercise environment.
     */
    private static void readFile3() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        System.out.println("==================无界流==============");
        wordCount(env.socketTextStream("hadoop111", 999));
        execute(env);
    }

    /**
     * Bounded-stream word count: reads {@code data/file/words.txt} in the default
     * STREAMING runtime mode, so incremental counts are printed per word.
     */
    private static void readFile2() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        System.out.println("==================有界流==============");
        wordCount(env.readTextFile("data/file/words.txt"));
        execute(env);
    }

    /**
     * Batch word count: reads {@code data/file/words.txt} with the runtime mode
     * forced to BATCH, so only the final count per word is emitted.
     */
    private static void readFile() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        System.out.println("=====================BATCH===============");
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);
        wordCount(env.readTextFile("data/file/words.txt"));
        execute(env);
    }

    /**
     * Shared word-count pipeline used by all three demos: splits each line on
     * single spaces, maps every word to {@code (word, 1L)}, keys by the word and
     * prints running sums.
     *
     * @param lines source of text lines to count words in
     */
    private static void wordCount(DataStream<String> lines) {
        lines.flatMap(new FlatMapFunction<String, String>() {
                    @Override
                    public void flatMap(String value, Collector<String> out) {
                        Arrays.stream(value.split(" ")).forEach(out::collect);
                    }
                })
                .map(word -> Tuple2.of(word, 1L))
                // Lambdas erase generic info, so the tuple type must be declared explicitly.
                .returns(Types.TUPLE(Types.STRING, Types.LONG))
                .keyBy(t -> t.f0)
                .sum(1)
                .print();
    }

    /**
     * Submits the job built on {@code env}. Failures are printed rather than
     * rethrown on purpose: this is a best-effort exercise runner, and a failure
     * in one demo (e.g. an unreachable socket) should not stop the others
     * launched from {@link #main(String[])}.
     *
     * @param env environment whose pipeline has already been defined
     */
    private static void execute(StreamExecutionEnvironment env) {
        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}
