package com.lengxf.flink.source;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * File-source word-count demo.
 *
 * <p>NOTE(review): the original header described this as "unbounded, event-driven"
 * reading, but a {@code FileSource} built without {@code monitorContinuously(...)}
 * is a BOUNDED source — it reads the file once and finishes.
 *
 * @author Lengxf
 */
// When running on JDK 17+, add JVM options:
// --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED
// --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED
public class FileSourceDemo {

    // Operator parallelism priority:
    // per-operator setting > env (code) > submit-time flag > config file
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(12);
        // BATCH mode: results are emitted once when the job completes.
        // STREAMING mode: an updated result is emitted per record, so output differs.
        // Configure the mode together with the rest of the env setup, before the
        // job graph is built — the original set it after print(), which works only
        // because it precedes execute(), but is easy to misread.
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);

        FileSource<String> fileSource = FileSource.forRecordStreamFormat(
                new TextLineInputFormat(),
                new Path("flink/src/main/resources/word.txt")).build();

        env.fromSource(fileSource, WatermarkStrategy.noWatermarks(), "fileSource")
                .flatMap(getStringTuple2FlatMapFunction())
                // the lambda erases Tuple2's generic parameters, so the result
                // type must be declared explicitly for Flink's serialization
                .returns(Types.TUPLE(Types.STRING, Types.INT))
                .keyBy(v -> v.f0)
                .sum(1)
                .print();

        env.execute();
    }

    /**
     * Returns a flat-map function that emits one {@code (word, 1)} tuple per
     * whitespace-separated token of the input line.
     *
     * <p>Splits on runs of whitespace ({@code \s+}) and skips empty tokens, so
     * repeated or leading separators do not produce empty-string word keys
     * (the original {@code split(" ")} counted "" as a word on double spaces).
     */
    private static FlatMapFunction<String, Tuple2<String, Integer>> getStringTuple2FlatMapFunction() {
        return (line, collector) -> {
            for (String word : line.split("\\s+")) {
                if (!word.isEmpty()) {
                    collector.collect(Tuple2.of(word, 1));
                }
            }
        };
    }
}
