package com.leilei.source.file;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.FlatMapFunction;

import org.apache.flink.streaming.api.scala.DataStream;
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Arrays;

/**
 * Flink file-based source demo: reads text data line by line from a file or a
 * directory (every readable file under the directory, including compressed
 * files, is consumed) and prints each line.
 *
 * <p>NOTE(review): this is a Java class, but the imports pull in the
 * <b>Scala</b> API ({@code org.apache.flink.streaming.api.scala.*}). Java code
 * should import the Java API instead —
 * {@code org.apache.flink.streaming.api.datastream.DataStream} and
 * {@code org.apache.flink.streaming.api.environment.StreamExecutionEnvironment} —
 * as the Scala wrappers are not guaranteed to be callable from Java. The body
 * below uses only the simple type names, so it compiles unchanged once the
 * imports are corrected. The {@code FlatMapFunction}, {@code Collector} and
 * {@code Arrays} imports appear unused here — presumably leftovers; verify
 * before removing.
 *
 * @author lei
 * @version 1.0
 * @date 2021/3/8 21:36
 */
public class FlinkSourceByFile {

    /**
     * Builds and runs the streaming job.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if job construction or execution fails
     *                   ({@code StreamExecutionEnvironment#execute} declares
     *                   {@code throws Exception}; the original {@code main}
     *                   omitted it, which does not compile against the Java API)
     */
    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        // Parallelism 1 keeps the printed output in a single, ordered task.
        env.setParallelism(1);

        // 2. Source: load data from the file system. readTextFile accepts a
        //    single file, a directory (all files inside are read), compressed
        //    files, or an HDFS path.
        // Single-file variant, kept as a usage example:
        // DataStream<String> source = env.readTextFile("E:\\aa\\flink-learn-1.12\\flink-learn-2-source\\src\\main\\java\\com\\leilei\\source\\file\\books.md");
        DataStream<String> sourceDir = env.readTextFile(
                "E:\\aa\\flink-learn-1.12\\flink-learn-2-source\\src\\main\\java\\com\\leilei\\source\\file");

        // 3. Transform: none in this demo.
        // 4. Sink: print every line, prefixed with "dir".
        //source.print();
        sourceDir.print("dir");

        // 5. Trigger execution; nothing runs until execute() is called.
        env.execute();
    }
}
