package com.zili.source;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;

/**
 * Demonstrates the four basic ways to create a Flink {@code DataStreamSource}:
 * from a text file, from a Java collection, from literal elements, and from a socket.
 *
 * @author : ranzlupup
 * @date : 2023/3/7 11:23
 */
public class SourceTest {
    /**
     * Builds the four example sources and prints the socket stream.
     *
     * @param args optional overrides for the socket source:
     *             args[0] = host (default {@code "hadoop102"}),
     *             args[1] = port (default {@code 9999})
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Socket endpoint is configurable from the command line; the defaults
        // reproduce the original hard-coded behavior exactly.
        String host = args.length > 0 ? args[0] : "hadoop102";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 9999;

        // Create the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a single, deterministic order for the demo.
        env.setParallelism(1);

        // 1. Read data from a file.
        DataStreamSource<String> stream1 = env.readTextFile("input/clicks.csv");

        // 2. Read data from a collection.
        ArrayList<Event> clicks = new ArrayList<>();
        clicks.add(new Event("Mary", "./home", 1000L));
        clicks.add(new Event("Bob", "./cart", 2000L));
        DataStreamSource<Event> stream2 = env.fromCollection(clicks);

        // 3. Read data from individual elements.
        DataStreamSource<Event> stream3 = env.fromElements(
                new Event("Mary", "./home", 1000L),
                new Event("Bob", "./cart", 2000L)
        );

        // 4. Read data from a socket.
        DataStreamSource<String> stream4 = env.socketTextStream(host, port);

        // Print results — uncomment the others to inspect each source in turn.
        // stream1.print();
        // stream2.print();
        // stream3.print();
        stream4.print();

        // Launch the job (required: sources are lazy until execute() is called).
        env.execute();
    }
}
