package com.tzg157.fitness.kafka;

import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.io.Serializable;
import java.util.*;

@Component
public class KafkaReceiver implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Kafka topic this receiver subscribes to (populated upstream by Flume). */
    private static final String TOPIC = "flumeLog";

    /** Hive table that parsed member-registration events are appended to. */
    private static final String TARGET_TABLE = "fitness.ods_member_info";

    @PostConstruct
    public void init() {
        // Intentionally empty: Spark/Kafka resources are created lazily in receive()
        // so the Spring context can start without a reachable Kafka/Hive cluster.
    }

    public static void main(String[] args) throws InterruptedException {
        new KafkaReceiver().receive();
    }

    /**
     * Starts a local Spark Streaming job that consumes JSON log events from the
     * {@value #TOPIC} Kafka topic (1-second micro-batches) and appends the parsed
     * member records to the {@value #TARGET_TABLE} Hive table.
     *
     * <p>Blocks forever in {@code awaitTermination()} once started.
     *
     * @throws InterruptedException if interrupted while awaiting termination
     */
    public void receive() throws InterruptedException {
        System.setProperty("HADOOP_USER_NAME", "root");

        SparkConf conf = new SparkConf().setAppName("fitness").setMaster("local[4]");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));

        // Hive-enabled session sharing the streaming context's SparkConf.
        SparkSession spark = SparkSession.builder()
                .config(jssc.sparkContext().getConf())
                .enableHiveSupport()
                .config("hive.metastore.uris", "thrift://localhost:9083")
                .getOrCreate();

        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "localhost:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "user_course_group_id");
        kafkaParams.put("auto.offset.reset", "latest");
        kafkaParams.put("enable.auto.commit", false);

        Collection<String> topics = Collections.singletonList(TOPIC);

        JavaInputDStream<ConsumerRecord<String, String>> stream =
                KafkaUtils.createDirectStream(
                        jssc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.Subscribe(topics, kafkaParams));

        // foreachRDD is an OUTPUT operation. The previous flatMap-based version was a
        // lazy transformation with no registered output op, so jssc.start() would fail
        // with "No output operations registered" and its Hive insert never ran. The
        // body below also executes on the driver, where the SparkSession is valid.
        stream.foreachRDD(rdd -> {
            if (rdd.isEmpty()) {
                return;
            }
            // One micro-batch is small (1s window), so collecting to the driver is fine.
            List<MemberInfo> members = rdd
                    .map(ConsumerRecord::value)
                    .filter(StrUtil::isNotBlank)
                    .map(KafkaReceiver::parseMember)
                    .filter(Objects::nonNull)
                    .collect();
            if (!members.isEmpty()) {
                // DataFrame append instead of string-concatenated INSERT statements:
                // avoids SQL injection via event payloads and issues one batched
                // write per micro-batch instead of one statement per record.
                spark.createDataFrame(members, MemberInfo.class)
                        .write()
                        .format("hive")
                        .mode(SaveMode.Append)
                        .saveAsTable(TARGET_TABLE);
            }
        });

        jssc.start();
        jssc.awaitTermination();
    }

    /**
     * Parses one raw Kafka payload into a {@link MemberInfo}.
     *
     * @param content non-blank JSON payload, e.g.
     *                {@code {"type":"register","data":{...},"timestamp_":"..."}}
     * @return the parsed record, or {@code null} when the payload has no "data" object
     */
    private static MemberInfo parseMember(String content) {
        JSONObject json = JSONUtil.parseObj(content);
        // NOTE(review): payloads carry a "type" field ("register", ...) but the original
        // code never filtered on it, so every event is treated as a registration —
        // confirm whether non-register events should be skipped here.
        JSONObject data = json.getJSONObject("data");
        if (data == null) {
            return null;
        }
        return new MemberInfo(
                data.getStr("username"),
                data.getStr("gender"),
                data.getInt("age"),
                json.getStr("timestamp_"));
    }

    /**
     * Java bean mirroring fitness.ods_member_info(username, gender, age, timestamp_).
     * Must be public with getters so Spark's bean encoder can derive the schema;
     * saveAsTable in append mode resolves columns against the existing table by name.
     */
    public static class MemberInfo implements Serializable {

        private static final long serialVersionUID = 1L;

        private final String username;
        private final String gender;
        private final Integer age;
        private final String timestamp_;

        public MemberInfo(String username, String gender, Integer age, String timestamp_) {
            this.username = username;
            this.gender = gender;
            this.age = age;
            this.timestamp_ = timestamp_;
        }

        public String getUsername() {
            return username;
        }

        public String getGender() {
            return gender;
        }

        public Integer getAge() {
            return age;
        }

        public String getTimestamp_() {
            return timestamp_;
        }
    }
}
