package com.tzg157.fitness.kafka;

import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.io.Serializable;
import java.util.*;

@Component
@RequiredArgsConstructor
public class MsgReceiver implements Serializable {

    /** Kafka topic carrying the Flume log lines this receiver consumes. */
    private static final String TOPIC = "flumeLog";

    /** Kafka consumer configuration, built once in {@link #init()}. */
    private Map<String, Object> kafkaParams;

    private final JavaStreamingContext localStreamingContext;

    private final MsgResolver msgResolver;
    private final SparkConf sparkConf;
    private final SparkSession spark;

    /**
     * Builds the Kafka consumer configuration and sets the Hadoop user
     * identity used when Spark writes out data.
     */
    @PostConstruct
    public void init() {
        kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "localhost:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "user_course_group_id");
        // "latest": a fresh consumer group starts from new messages only.
        kafkaParams.put("auto.offset.reset", "latest");
        // Offsets are not auto-committed by the Kafka client; Spark controls them.
        kafkaParams.put("enable.auto.commit", false);

        System.setProperty("HADOOP_USER_NAME", "root");
    }

    /**
     * Subscribes to {@value #TOPIC}, parses every non-blank record as JSON and
     * hands each non-empty micro-batch to {@link MsgResolver#resolve}.
     * Blocks the calling thread until the streaming context terminates.
     *
     * @throws InterruptedException if the thread is interrupted while awaiting termination
     */
    public void receive() throws InterruptedException {
        Collection<String> topics = Collections.singletonList(TOPIC);

        // Direct (receiver-less) Kafka stream; one RDD partition per Kafka partition.
        JavaInputDStream<ConsumerRecord<String, String>> stream =
                KafkaUtils.createDirectStream(
                        localStreamingContext,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.Subscribe(topics, kafkaParams));

        // Parse each record; drop blanks AND malformed payloads so one bad
        // message cannot fail the whole micro-batch.
        JavaDStream<JSONObject> newDataStream = stream.flatMap(record -> {
            String content = record.value();
            if (StrUtil.isBlank(content)) {
                return Collections.emptyIterator();
            }
            try {
                return Collections.singletonList(JSONUtil.parseObj(content)).iterator();
            } catch (Exception ignored) {
                // NOTE(review): malformed JSON is silently skipped; consider
                // logging or routing to a dead-letter topic.
                return Collections.emptyIterator();
            }
        });

        newDataStream.foreachRDD(rdd -> {
            if (!rdd.isEmpty()) {
                // Batches are expected to be small enough to collect to the driver.
                List<JSONObject> batch = rdd.collect();
                msgResolver.resolve(batch, spark);
            }
        });

        // Debug output: prints the first elements of every batch to stdout.
        newDataStream.print();
        localStreamingContext.start();
        localStreamingContext.awaitTermination();
    }
}
