package com.atguigu.flink.datastramapi.source;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Arrays;
import java.util.List;

/**
 * Created by Smexy on 2023/2/24
 */
/**
 * Demonstrates the basic built-in Flink DataStream sources:
 * element-based, collection-based, and text-file-based.
 *
 * <p>Run as a plain Java main; requires a Flink runtime on the classpath
 * and, for the file source, access to the referenced HDFS cluster.
 */
public class Demo1_TestSource
{
    // Declaring `throws Exception` instead of catch + printStackTrace:
    // swallowing the execute() failure hides job errors and lets the
    // process exit "successfully" even when the pipeline never ran.
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Default parallelism for all operators unless overridden per operator.
        env.setParallelism(1);

        // For quick testing. Element-based source — non-parallel.
        DataStreamSource<Integer> ds = env.fromElements(1, 2, 3, 4, 5);

        // Collection-based source — non-parallel.
        List<String> list = Arrays.asList("a", "b", "c");
        DataStreamSource<String> ds1 = env.fromCollection(list);

        // Read a text file. The file system can be local or HDFS (requires the
        // Hadoop client on the classpath). Files are read as UTF-8 by default.
        // setParallelism(2) overrides the environment default for this source only.
        DataStreamSource<String> ds3 = env.readTextFile("hdfs://hadoop102:9820/input").setParallelism(2);
        //ds.print();
        //ds1.print();
        ds3.print();

        // Lazily-built pipeline is only submitted/executed here; any failure
        // now propagates to the caller instead of being silently printed.
        env.execute();
    }
}
