package com.lrj.datastream.source;

import com.lrj.datastream.domain.ClickEvent;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Demonstrates the different kinds of Flink DataStream sources.
 * 1. Bounded streams:
 * - readTextFile
 * - fromCollection
 * - fromElements
 * 2. Unbounded streams:
 * - socketTextStream (for testing only)
 * - Kafka connector (production-style)
 *
 * @author lrj
 * @date 2022/3/28 13:32
 */
public class TestStreamSource {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the demo's console output in a single, readable stream.
        env.setParallelism(1);

        // 1. Bounded source: lines read from a local text file.
        env.readTextFile("input/source/click.txt").print();

        // 2. Bounded source: a pre-built Java collection.
        List<Integer> numbers = new ArrayList<>();
        numbers.add(1);
        numbers.add(2);
        numbers.add(3);
        env.fromCollection(numbers).print("3");

        // 3. Bounded source: elements listed inline.
        env.fromElements(
                        new ClickEvent("Mary", "./home", 1000L),
                        new ClickEvent("Blob", "./home", 2000L),
                        new ClickEvent("Mary", "./home", 3000L))
                .print();

        ParameterTool params = ParameterTool.fromArgs(args);
        // 4. Unbounded source: socket text stream, enabled with --socket true (testing only).
        if (params.getBoolean("socket", false)) {
            attachSocketSource(env, params);
        }
        // 5. Unbounded source: Kafka connector, enabled with --kafka true.
        if (params.getBoolean("kafka", false)) {
            attachKafkaSource(env, params);
        }
        env.execute(TestStreamSource.class.getName());
    }

    /**
     * Attaches a socket text source to the job and prints it with the "4" label.
     * Host and port come from --host / --port, defaulting to localhost:10000.
     */
    private static void attachSocketSource(StreamExecutionEnvironment env, ParameterTool params) {
        String host = params.get("host", "localhost");
        int port = params.getInt("port", 10000);
        env.socketTextStream(host, port).print("4");
    }

    /**
     * Attaches a Kafka consumer on topic "click" and prints each record as a String.
     * Broker and consumer group come from --bootstrapServer / --groupId, defaulting
     * to localhost:9092 / "test"; new groups start reading from the latest offset.
     */
    private static void attachKafkaSource(StreamExecutionEnvironment env, ParameterTool params) {
        Properties consumerProps = new Properties();
        consumerProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                params.get("bootstrapServer", "localhost:9092"));
        consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG,
                params.get("groupId", "test"));
        consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        FlinkKafkaConsumer<String> source =
                new FlinkKafkaConsumer<>("click", new SimpleStringSchema(), consumerProps);
        env.addSource(source).print();
    }
}
