package com.apexsoft.util;

import com.apexsoft.service.newconsumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import scala.Tuple2;

import java.util.*;

/**
 * Created by caigx
 * Date: 2020/9/20
 * Time: 20:33
 * Description: No Description
 */
/**
 * Demo utility that consumes string records from a Kafka topic via the
 * Spark Streaming Kafka 0.10 direct-stream API and prints each batch.
 *
 * <p>NOTE(review): this class keeps its state in a public mutable static
 * field ({@link #sql}) which is also assigned from inside the executor-side
 * {@code flatMap} closure. That only "works" in {@code local[*]} mode, where
 * driver and executors share one JVM. On a real cluster the closure is
 * serialized to executors and the assignment mutates an executor-local copy
 * — the driver's field is never updated. Use a broadcast/accumulator or an
 * external store instead if this needs to run distributed.
 */
public class DStreamUtil {

    /** Last SQL text received from Kafka; initialized to a default query. */
    public static String sql = "select * from updates ";

    /**
     * Entry point: wires a direct Kafka stream for topic {@code test1},
     * echoes every record to stdout, and blocks until the streaming
     * context terminates.
     *
     * @param args unused command-line arguments
     * @throws Exception propagated from Spark streaming start/await
     */
    public static void main(String[] args) throws Exception {
        SparkConf sparkConf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("SparkStreamingFromkafka");
        // 2-second micro-batch interval.
        JavaStreamingContext streamingContext =
                new JavaStreamingContext(sparkConf, Durations.seconds(2));

        Map<String, Object> kafkaParams = new HashMap<>();
        // Multiple brokers may be supplied as a comma-separated list.
        kafkaParams.put("bootstrap.servers", "pro1:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "test1");
        // One or more topics to subscribe to.
        Collection<String> topics = Arrays.asList("test1");

        JavaInputDStream<ConsumerRecord<String, String>> javaInputDStream =
                KafkaUtils.createDirectStream(
                        streamingContext,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.Subscribe(topics, kafkaParams));

        // FlatMapFunction is a functional interface, so a lambda replaces the
        // anonymous inner class. WARNING: the static-field writes below run on
        // executors and are only visible driver-side in local mode (see class doc).
        JavaDStream<String> objectJavaDStream = javaInputDStream.flatMap(record -> {
            System.out.println(record.value());
            sql = record.value();
            System.out.println("-->" + sql);
            newconsumer.id = sql;
            System.out.println("=>" + newconsumer.id);
            return Arrays.asList(record.value()).iterator();
        });

        // Printed once at graph-construction time, BEFORE any batch runs —
        // this will always show the initial default value of sql.
        System.out.println("============>" + sql);
        objectJavaDStream.print();

        streamingContext.start();
        streamingContext.awaitTermination();
    }

    /**
     * Returns the current value of the shared {@link #sql} field.
     * Accesses the static member directly instead of via {@code this}.
     *
     * @return the last SQL string observed (driver-side view)
     */
    public String getI() {
        return sql;
    }
}
