package com.chis.kafka;

import com.chis.jm.bean.ChildSynInfoBean;
import com.chis.jmdataspark.comm.JedisSerializeUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;

import java.util.*;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

import static org.apache.kafka.clients.consumer.ConsumerConfig.*;
import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;

public class SparkKafka {

    /**
     * Driver entry point: consumes records from Kafka topics {@code wlj1} and
     * {@code wlj2} (partitions 0-2, all read from offset 0) via a Spark
     * Streaming direct stream and prints each record's topic, partition,
     * offset and value.
     *
     * <p>Fix: the original println printed {@code obj[1]} for the offset and
     * value fields as well; they are {@code obj[2]} and {@code obj[3]}.
     */
    public static void main(String[] args) {
        try {
            // Kafka consumer settings. Auto-commit is disabled because start
            // offsets are assigned explicitly below. request.timeout.ms must
            // stay larger than session.timeout.ms (Kafka client requirement).
            Map<String, Object> kafkaProperties = new HashMap<>();
            kafkaProperties.put(ENABLE_AUTO_COMMIT_CONFIG, "false");
            kafkaProperties.put(AUTO_OFFSET_RESET_CONFIG, "latest");
            kafkaProperties.put(SESSION_TIMEOUT_MS_CONFIG, "50000");
            kafkaProperties.put(REQUEST_TIMEOUT_MS_CONFIG, "51000");
            kafkaProperties.put(GROUP_ID_CONFIG, "wljgroup");
            kafkaProperties.put(BOOTSTRAP_SERVERS_CONFIG, "10.88.88.107:9092,10.88.88.106:9092,10.88.88.105:9092");
            // Kafka accepts the deserializer as a class-name String; the
            // original reflective Class.forName(...) was unnecessary and
            // added a spurious checked-exception path.
            kafkaProperties.put(KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            kafkaProperties.put(VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");

            // Explicitly assign every partition (0-2) of both topics, all
            // starting at offset 0 — replaces twelve hand-written add/put
            // lines with identical resulting collections.
            Collection<TopicPartition> topicPartitions = new HashSet<>();
            Map<TopicPartition, Long> offsets = new HashMap<>();
            for (String topic : Arrays.asList("wlj1", "wlj2")) {
                for (int partition = 0; partition < 3; partition++) {
                    TopicPartition tp = new TopicPartition(topic, partition);
                    topicPartitions.add(tp);
                    offsets.put(tp, 0L);
                }
            }

            SparkConf sc = new SparkConf();
            sc.setMaster("local");
            sc.setAppName("wljspark");
            sc.set("spark.streaming.kafka.maxRatePerPartition", "100");
            sc.set("spark.streaming.stopGracefullyOnShutdown", "true");
            // Spark's poll timeout for the cached Kafka consumer; sized to
            // match the consumer session timeout above.
            sc.set("spark.streaming.kafka.consumer.poll.ms", "50000");

            // 5-second micro-batches.
            JavaStreamingContext jmc = new JavaStreamingContext(new JavaSparkContext(sc), Durations.seconds(5L));

            // Keys/values are Strings because both deserializers are
            // StringDeserializer (tightened from <Object,Object>).
            JavaInputDStream<ConsumerRecord<String, String>> jids = KafkaUtils.createDirectStream(jmc,
                    LocationStrategies.PreferConsistent(),
                    ConsumerStrategies.Assign(topicPartitions, kafkaProperties, offsets));

            // Project each record to {topic, partition, offset, value}.
            JavaDStream<Object[]> redisdata = jids.map(new Function<ConsumerRecord<String, String>, Object[]>() {
                @Override
                public Object[] call(ConsumerRecord<String, String> s) throws Exception {
                    return new Object[]{s.topic(), s.partition(), s.offset(), s.value()};
                }
            });

            redisdata.foreachRDD(new VoidFunction<JavaRDD<Object[]>>() {
                @Override
                public void call(JavaRDD<Object[]> rdd) throws Exception {
                    rdd.foreachPartition(new VoidFunction<Iterator<Object[]>>() {
                        @Override
                        public void call(Iterator<Object[]> records) throws Exception {
                            try {
                                while (records.hasNext()) {
                                    Object[] obj = records.next();
                                    // Fixed indices: offset is obj[2], value is
                                    // obj[3] (original printed obj[1] for both).
                                    System.out.println("================topic:" + obj[0] + " partition:" + obj[1] + " offset：" + obj[2] + " value:" + obj[3]);
                                }
                            } catch (Exception e) {
                                // Print executor-side before rethrowing so the
                                // failure is also visible in the worker log.
                                e.printStackTrace();
                                throw e;
                            }
                        }
                    });
                }
            });

            jmc.start();
            jmc.awaitTermination();

        } catch (Exception e) {
            // Top-level guard: a streaming failure ends the driver; at minimum
            // surface the stack trace (a production app should log and exit
            // with a non-zero status instead).
            e.printStackTrace();
        }
    }
}
