package com.zzy.chapter05.source;

import entity.Event;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;

/**
 * Demonstrates the different ways to create a Flink {@code DataStreamSource}:
 * from a collection, from fixed elements, from a text file, from a socket
 * text stream, from Kafka, and from a custom source function
 * ({@code ClickSource}). Only the custom-source variant is active; the
 * alternatives are kept as commented reference snippets.
 *
 * @author wqzhzy
 */
public class SourceTest {
    public static void main(String[] args) throws Exception{
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the printed records arrive in a deterministic order.
        env.setParallelism(1);

        // Read from a collection (or directly from individual elements).
//        ArrayList<Event> clicks = new ArrayList<>();
//        clicks.add(new Event("Mary", "./home", 1000L));
//        clicks.add(new Event("Bob", "./cart", 2000L));
//        DataStreamSource<Event> source = env.fromCollection(clicks);
//        DataStreamSource<Event> source = env.fromElements(
//                new Event("Mary", "./home", 1000L),
//                new Event("Bob", "./cart", 2000L)
//        );

        // Read from a text file.
        // With hadoop-client on the classpath, an hdfs://... path also works.
//        DataStreamSource<String> source = env.readTextFile("input/words.txt");

        // Read from a socket text stream.
        //DataStreamSource<String> source = env.socketTextStream("hadoop100", 7777);

        // Read from Kafka.
//        Properties props = new Properties();
//        props.setProperty("bootstrap.servers","hadoop100:9092");
//        props.setProperty("group.id", "ConsumerGroup");
//        props.setProperty("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
//        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
//        props.setProperty("auto.offset.reset", "latest");
//        DataStreamSource<String> source = env.addSource(new FlinkKafkaConsumer<String>(
//                "clicks_topic",
//                new SimpleStringSchema(),
//                props
//        ));

        // Custom source: emits synthetic click events until cancelled.
        DataStreamSource<Event> source = env.addSource(new ClickSource());
        source.print();
        // Blocks and runs the streaming job; required for any output to appear.
        env.execute();
    }
}
