package cn.itcast.flink.streaming;

import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Author itcast
 * Date 2021/9/2 20:51
 * Desc TODO
 */
/**
 * Demo: write a bounded element stream to text files and observe how the
 * sink's parallelism affects the output layout.
 *
 * <p>The environment parallelism is 1, but the sink is explicitly set to
 * parallelism 3 — with a parallelism &gt; 1, the output path becomes a
 * directory containing one file per sink subtask (expected: 3 files).
 */
public class WriteFile {
    /** Default output location, used when no argument is supplied. */
    private static final String DEFAULT_OUTPUT_PATH =
            "D:\\project\\flink-base-sz23\\data\\writefile";

    /**
     * Runs the job.
     *
     * @param args optional; args[0] overrides the output path (defaults to
     *             {@link #DEFAULT_OUTPUT_PATH})
     * @throws Exception if job execution fails
     */
    public static void main(String[] args) throws Exception {
        // Generalized: allow the output path to be passed on the command line.
        final String outputPath = args.length > 0 ? args[0] : DEFAULT_OUTPUT_PATH;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Default environment parallelism is 1.
        env.setParallelism(1);

        DataStreamSource<String> source =
                env.fromElements("hello", "world", "hadoop", "flink", "spark");

        // Sink with parallelism 3 to check whether three output files are
        // created. OVERWRITE replaces any existing output from previous runs.
        source.writeAsText(outputPath, FileSystem.WriteMode.OVERWRITE)
                .setParallelism(3);

        env.execute();
    }
}
