package com.sanfu.etl.service.impl;

import com.sanfu.etl.config.FlinkKafkaConsumerConfiguration;
import com.sanfu.etl.service.FlinkKafkaService;
import com.sanfu.etl.utils.StreamEnvironmentUtils;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

// *********************
// ** @Date: 2021-06-11
// ** @Author: Camemax
// ** @Description: (1) Implements the logic for using Kafka as a Flink data-stream source [input].
// **               (2) Implements the logic for using Kafka as a Flink data-stream sink [output].
// *********************
/**
 * Kafka-backed implementation of {@link FlinkKafkaService}.
 *
 * <p>{@link #loadDataStreams()} attaches two pre-configured Kafka consumers to the
 * shared Flink streaming environment and returns the resulting input streams.
 * {@link #outDataStream()} is the Kafka-as-sink counterpart and is not yet implemented.
 */
@Service("flinkKafkaService")
public class FlinkKafkaServiceImpl implements FlinkKafkaService
{
    /** Supplies the pre-configured Kafka consumers (topics, group id, deserializer). */
    private final FlinkKafkaConsumerConfiguration configuration;

    /**
     * Constructor injection instead of field injection: the dependency is mandatory,
     * immutable, and the class can be instantiated in tests without a Spring context.
     *
     * @param configuration provider of the two Kafka consumer instances; never {@code null}
     */
    @Autowired
    public FlinkKafkaServiceImpl(FlinkKafkaConsumerConfiguration configuration) {
        this.configuration = configuration;
    }

    /**
     * Loads the two Kafka-sourced input streams for the ETL pipeline.
     *
     * @return a list containing stream A and stream B, in that order
     */
    @Override
    public ArrayList<DataStream<String>> loadDataStreams() {

        StreamExecutionEnvironment streamEnv = StreamEnvironmentUtils.getStreamEnv();

        // Attach both configured consumers as sources on the shared environment.
        FlinkKafkaConsumer<String> consumerA = configuration.stringFlinkKafkaConsumerA();
        FlinkKafkaConsumer<String> consumerB = configuration.stringFlinkKafkaConsumerB();

        DataStream<String> dataStreamA = streamEnv.addSource(consumerA);
        DataStream<String> dataStreamB = streamEnv.addSource(consumerB);

        // The interface declares ArrayList, so the concrete type must be kept here;
        // presized to 2 since exactly two streams are added.
        ArrayList<DataStream<String>> dataStreams = new ArrayList<>(2);
        dataStreams.add(dataStreamA);
        dataStreams.add(dataStreamB);

        return dataStreams;
    }

    /**
     * Kafka-as-sink counterpart of {@link #loadDataStreams()}.
     *
     * @return {@code null} — not implemented yet; callers MUST null-check.
     *         TODO(review): implement, or throw {@link UnsupportedOperationException}
     *         once callers are audited (changing the return now would alter behavior).
     */
    @Override
    public DataStream<String> outDataStream() {
        return null;
    }
}
