package com.example.demo.demos.util;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;

@Component
public class SparkStreamingJob implements CommandLineRunner {

    private static final Logger logger = LoggerFactory.getLogger(SparkStreamingJob.class);

    // ObjectMapper is thread-safe and expensive to build; share one instance
    // instead of constructing a new mapper for every input line.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Expected line shape: "<id>,<id>,<rating>,<timestamp>" (presumably
    // userId,movieId,rating,timestamp — confirm against the data producer).
    // Compiled once rather than via String.matches() per line.
    private static final Pattern LINE_PATTERN = Pattern.compile("\\d+,\\d+,\\d+(\\.\\d+)?,\\d+");

    private static final String RATINGS_ENDPOINT = "http://localhost:8081/api/ratings/add";

    /**
     * Starts a local Spark Streaming job that watches an HDFS directory for
     * newly created text files, filters lines matching the rating format,
     * and POSTs each rating value as JSON ({@code {"value": <rating>}}) to
     * the ratings REST endpoint.
     *
     * <p>Blocks the calling thread until the streaming context terminates.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the streaming context fails or the wait is interrupted
     */
    @Override
    public void run(String... args) throws Exception {
        SparkConf sparkConf = new SparkConf().setAppName("SparkStreamingJob").setMaster("local[*]");
        JavaStreamingContext streamingContext = new JavaStreamingContext(sparkConf, new Duration(2000));

        String directoryPath = "hdfs://192.168.5.128:9000/tmp/data/";
        logger.info("监听目录: {}", directoryPath);
        JavaDStream<String> lines = streamingContext.textFileStream(directoryPath);

        // Driver-side running total. A local AtomicLong (which is Serializable)
        // is captured instead of a mutable instance field, so the foreachRDD
        // closure does not drag in `this` — SparkStreamingJob itself is not
        // Serializable and Spark's closure cleaner may reject such a capture.
        AtomicLong totalProcessed = new AtomicLong();

        lines.foreachRDD(rdd -> {
            // count() is a Spark action that runs a job over the RDD; the
            // original invoked count() twice plus isEmpty() per batch, tripling
            // the work. Evaluate it exactly once and branch on the result.
            long count = rdd.count();
            logger.info("检测到新RDD, RDD大小: {}", count);
            if (count > 0) {
                logger.info("处理RDD, 当前RDD行数: {}, 当前总行数: {}", count, totalProcessed.addAndGet(count));

                rdd.foreachPartition(partition -> {
                    // One HttpClient per partition, closed when the partition
                    // has been fully drained.
                    try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
                        partition.forEachRemaining(line -> {
                            logger.info("处理行数据: {}", line);
                            if (!LINE_PATTERN.matcher(line).matches()) {
                                return; // skip malformed lines silently, as before
                            }
                            String[] parts = line.split(",");
                            double rating = Double.parseDouble(parts[2]);

                            Map<String, Object> data = new HashMap<>();
                            data.put("value", rating);
                            String json;
                            try {
                                json = MAPPER.writeValueAsString(data);
                            } catch (JsonProcessingException e) {
                                // A map of simple values should always serialize;
                                // rethrow with the cause preserved.
                                throw new RuntimeException(e);
                            }

                            HttpPost post = new HttpPost(RATINGS_ENDPOINT);
                            post.setEntity(new StringEntity(json, ContentType.APPLICATION_JSON));

                            try (CloseableHttpResponse response = httpClient.execute(post)) {
                                logger.info("发送评分数据: {}", json);
                            } catch (IOException e) {
                                // Log the full exception — getMessage() alone
                                // discards the stack trace.
                                logger.error("发送数据时出错", e);
                            }
                        });
                    } catch (IOException e) {
                        logger.error("HttpClient出错", e);
                    }
                });
            } else {
                logger.info("空RDD，没有新数据");
            }
        });

        streamingContext.start();
        streamingContext.awaitTermination();
    }
}
