package cn.xuexiyuan.flinkstudy.source;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Arrays;

/**
 * @Description:
 * @Author 左龙龙
 * @Date 21-3-23
 * @Version 1.0
 **/
/**
 * Demo: creating Flink DataStreams from in-memory collections.
 *
 * <p>Shows four collection-based source APIs side by side:
 * {@code fromElements}, {@code fromCollection}, the deprecated
 * {@code generateSequence}, and its replacement {@code fromSequence}.
 */
public class SourceDemo01_Collection {
    public static void main(String[] args) throws Exception {
        // 0. Set up the streaming execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 1. Sources.
        // fromElements(varargs) — a fixed set of elements.
        final DataStreamSource<String> elementsSource = env.fromElements("java flink spark", "hello");
        // fromCollection(Collection) — any Java collection.
        final DataStreamSource<String> collectionSource = env.fromCollection(Arrays.asList("java flink spark", "hadoop"));
        // generateSequence(from, to) — NOTE: deprecated in newer Flink; kept here for comparison.
        final DataStreamSource<Long> legacySequenceSource = env.generateSequence(0, 10);
        // fromSequence(from, to) — the current replacement for generateSequence.
        final DataStreamSource<Long> sequenceSource = env.fromSequence(0, 10);

        // 2. Transformations — none in this demo.

        // 3. Sinks — print each stream to stdout.
        elementsSource.print();
        collectionSource.print();
        legacySequenceSource.print();
        sequenceSource.print();

        // 4. Execute the job (sources do not run until execute() is called).
        env.execute("SourceDemo01_Collection");

    }
}
