package com.chc.sparrow;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * 2025-01-10
 * Flink file-read example: streams a text file and prints per-word counts.
 * Requires the flink-connector-files dependency.
 */
public class FlinkFileApp {

    /** Input path used when no command-line argument is supplied. */
    private static final String DEFAULT_INPUT = "input/text.txt";

    /**
     * Reads a text file as a bounded stream, tokenizes each line into
     * whitespace-separated words, and prints a running count per word.
     *
     * @param args optional; {@code args[0]} overrides the default input path
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Local environment with the Flink web UI enabled (http://localhost:8081).
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // Allow the input file to be overridden on the command line; fall back
        // to the original hard-coded path so existing usage keeps working.
        String inputPath = (args != null && args.length > 0 && StringUtils.isNotBlank(args[0]))
                ? args[0]
                : DEFAULT_INPUT;

        FileSource<String> fileSource = FileSource
                .forRecordStreamFormat(new TextLineInputFormat(), new Path(inputPath))
                .build();

        env.fromSource(fileSource, WatermarkStrategy.noWatermarks(), "fileSource")
                // Emit a (word, 1) pair for every word; split on runs of any
                // whitespace so tabs and multiple spaces are handled too.
                .flatMap((String line, Collector<Tuple2<String, Integer>> out) -> {
                    if (StringUtils.isNotBlank(line)) {
                        for (String word : line.split("\\s+")) {
                            if (StringUtils.isNotBlank(word)) {
                                out.collect(Tuple2.of(word, 1));
                            }
                        }
                    }
                })
                // Lambda generic types are erased at runtime; declare the tuple
                // type explicitly so Flink can create serializers.
                .returns(Types.TUPLE(Types.STRING, Types.INT))
                .keyBy(t -> t.f0)
                .sum(1)
                .print();

        // Named job for easier identification in the web UI / logs.
        env.execute("FlinkFileApp");
    }
}
