package com.apex.spark.source;

import com.apex.spark.SparkEnvironment;
import com.apex.spark.stream.SparkStreamingSource;
import com.typesafe.config.Config;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.streaming.StreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Spark DStream-based source that subscribes to Kafka topics via the
 * kafka010 direct-stream API.
 *
 * <p>Lifecycle: {@link #setConfig(Config)} is called first with the plugin
 * configuration, then {@link #prepare(SparkEnvironment)} extracts the topic
 * list and consumer parameters, and finally {@link #getData(SparkEnvironment)}
 * creates the direct stream.
 */
public class KafkaDStreamingSource implements SparkStreamingSource {

    /** Config key holding the comma-separated topic list.
     *  NOTE(review): key name assumed from the field name — confirm against the plugin's config schema. */
    private static final String TOPICS_KEY = "topics";

    /** Config sub-section whose entries are copied verbatim into the Kafka consumer
     *  parameters (e.g. bootstrap.servers, group.id).
     *  NOTE(review): section name assumed from common plugin conventions — confirm. */
    private static final String CONSUMER_KEY = "consumer";

    /** Plugin configuration injected via {@link #setConfig(Config)}. */
    private Config config;

    /** Comma-separated Kafka topic names; populated in {@link #prepare(SparkEnvironment)}. */
    private String topics;

    /** Kafka consumer parameters for the direct stream; populated in {@link #prepare(SparkEnvironment)}. */
    private final Map<String, Object> kafkaParams = new HashMap<>();

    /**
     * Reads the topic list and Kafka consumer parameters out of the plugin config.
     *
     * <p>The original implementation left this empty, so {@code topics} stayed
     * {@code null} and {@code kafkaParams} stayed empty — the subscription built
     * in {@link #getData(SparkEnvironment)} could never work at runtime.
     *
     * @param plugin the Spark environment this source is being prepared for
     */
    @Override
    public void prepare(SparkEnvironment plugin) {
        if (config == null) {
            return; // nothing to read; getData will subscribe to an empty topic list
        }
        if (config.hasPath(TOPICS_KEY)) {
            this.topics = config.getString(TOPICS_KEY);
        }
        if (config.hasPath(CONSUMER_KEY)) {
            // Copy every entry of the consumer section into the Kafka params,
            // unwrapping the Typesafe ConfigValue into a plain string.
            config.getConfig(CONSUMER_KEY).entrySet().forEach(entry ->
                    kafkaParams.put(entry.getKey(), String.valueOf(entry.getValue().unwrapped())));
        }
    }

    /** @return the plugin configuration, or {@code null} if {@link #setConfig(Config)} was never called */
    @Override
    public Config getConfig() {
        return config;
    }

    /** @param config the plugin configuration to use for this source */
    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    /**
     * Creates the Kafka direct stream on the environment's streaming context.
     *
     * <p>{@code topics} may hold several comma-separated names; the original code
     * passed the whole string as a single topic via {@code singletonList}, which
     * silently subscribed to a non-existent topic whenever more than one was
     * configured. It is now split on commas.
     *
     * @param environment supplies the {@link StreamingContext} to attach to
     * @return currently always {@code null} — see FIXME below
     */
    @Override
    public Dataset<Row> getData(SparkEnvironment environment) {
        StreamingContext streamingContext = environment.getStreamingContext();
        // Guard against prepare() not having found a topic list: subscribe to
        // nothing rather than NPE inside the Kafka client.
        List<String> topicList = topics == null
                ? Collections.emptyList()
                : Arrays.asList(topics.split(","));
        KafkaUtils.createDirectStream(streamingContext,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.Subscribe(topicList, kafkaParams));
        // FIXME(review): the DStream created above is discarded and null is
        // returned, so callers get no data and will likely NPE on the result.
        // Bridging a DStream to a Dataset<Row> needs a foreachRDD conversion or a
        // structured-streaming source; left unchanged because the interface
        // contract (SparkStreamingSource) is not visible from this file.
        return null;
    }
}
