package team.bluepen.supermarket.service.calc;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import scala.Tuple2;
import team.bluepen.supermarket.conf.KafkaProperties;
import team.bluepen.supermarket.data.entity.Product;
import team.bluepen.supermarket.data.entity.ProductIdAmount;
import team.bluepen.supermarket.data.entity.ProductNameAmount;
import team.bluepen.supermarket.data.entity.ProductNameStatusAmount;
import team.bluepen.supermarket.data.entity.ProductStatePair;
import team.bluepen.supermarket.data.entity.ProductStatusAmount;
import team.bluepen.supermarket.kafka.KafkaPusher;
import team.bluepen.supermarket.kafka.KafkaTopics;
import team.bluepen.supermarket.service.DataKeys;
import team.bluepen.supermarket.service.ProductClientMethodRegister;

import java.io.Serializable;
import java.util.Iterator;
import java.util.function.BiFunction;

/**
 * Streams {@link Product} updates from Kafka through Spark Streaming and
 * publishes aggregated amounts back to {@link KafkaTopics#STATE_DATA_TOPIC}:
 * totals per state, per name, per id, and per name/state pair.
 *
 * <p>The class implements {@link Serializable} because {@code this} is
 * captured by the Spark closures registered in
 * {@link #registerMethods(JavaDStream)}; the streaming client is
 * {@code transient} so it is never shipped to executors.
 *
 * @author RollW
 */
@Service
public class ProductStateCountService implements ProductClientMethodRegister, Serializable {
    private static final Logger logger = LoggerFactory.getLogger(ProductStateCountService.class);

    // Driver-side only; must never be serialized into executor closures.
    private final transient ProductSparkStreamingClient client;
    private final KafkaProperties kafkaProperties;

    public ProductStateCountService(KafkaProperties kafkaProperties,
                                    ProductSQLCountService sqlCountService) {
        this.kafkaProperties = kafkaProperties;
        client = new ProductSparkStreamingClient(kafkaProperties,
                sqlCountService.getSparkContext(),
                KafkaTopics.PRODUCT_TOPIC, this);
        // NOTE(review): starting the streaming client from the constructor means
        // registerMethods may run before the Spring context is fully wired — confirm.
        client.start();
    }

    public JavaSparkContext getSparkContext() {
        return client.getContext().sparkContext();
    }

    /**
     * Registers the four aggregation pipelines on the product stream. Each
     * pipeline reduces the micro-batch by its key and pushes the reduced
     * tuples to Kafka. A {@link KafkaPusher} is created once per RDD
     * partition (not once per record, as before) and is always closed in a
     * {@code finally} block, so a failing push no longer leaks the producer.
     *
     * @param productJavaDStream stream of product updates from the product topic
     */
    @Override
    public void registerMethods(JavaDStream<Product> productJavaDStream) {
        // (2) total amount per product state (active / inactive)
        productJavaDStream
                .mapToPair(product -> new Tuple2<>(product.isState(), product.getAmount()))
                .reduceByKey(Integer::sum)
                .foreachRDD(rdd -> rdd.foreachPartition(partition ->
                        drainToKafka(partition, DataKeys.PRODUCT_COUNT,
                                new KafkaPusher<>(
                                        KafkaTopics.STATE_DATA_TOPIC,
                                        kafkaProperties.getBootstrapServers(),
                                        ProductStatusAmount.serializer()),
                                ProductStatusAmount::new)));

        // (4) total amount per product name
        productJavaDStream
                .mapToPair(product -> new Tuple2<>(product.getName(), product.getAmount()))
                .reduceByKey(Integer::sum)
                .foreachRDD(rdd -> rdd.foreachPartition(partition ->
                        drainToKafka(partition, DataKeys.PRODUCT_NAME_COUNT,
                                new KafkaPusher<>(
                                        KafkaTopics.STATE_DATA_TOPIC,
                                        kafkaProperties.getBootstrapServers(),
                                        ProductNameAmount.serializer()),
                                ProductNameAmount::new)));

        // (3) total amount per product id
        productJavaDStream
                .mapToPair(product -> new Tuple2<>(product.getId(), product.getAmount()))
                .reduceByKey(Integer::sum)
                .foreachRDD(rdd -> rdd.foreachPartition(partition ->
                        drainToKafka(partition, DataKeys.PRODUCT_ID_COUNT,
                                new KafkaPusher<>(
                                        KafkaTopics.STATE_DATA_TOPIC,
                                        kafkaProperties.getBootstrapServers(),
                                        ProductIdAmount.serializer()),
                                ProductIdAmount::new)));

        // (5) active/inactive amounts per product name
        productJavaDStream
                .mapToPair(product -> new Tuple2<>(product.getName(),
                        ProductStatePair.create(product.isState(), product.getAmount())))
                .reduceByKey(ProductStatePair::add)
                .foreachRDD(rdd -> rdd.foreachPartition(partition ->
                        drainToKafka(partition, DataKeys.PRODUCT_NAME_STATE_COUNT,
                                new KafkaPusher<>(
                                        KafkaTopics.STATE_DATA_TOPIC,
                                        kafkaProperties.getBootstrapServers(),
                                        ProductNameStatusAmount.serializer()),
                                (name, pair) -> new ProductNameStatusAmount(name,
                                        pair.getActiveCount(), pair.getInactiveCount()))));
    }

    /**
     * Pushes every reduced {@code (key, value)} tuple of one RDD partition to
     * Kafka under {@code dataKey}, converting each tuple to its wire entity
     * first. Runs on the executor; the pusher is closed in {@code finally} so
     * the underlying producer cannot leak when a push fails.
     *
     * @param partition tuples of one partition of the reduced RDD
     * @param dataKey   message key identifying the aggregate kind
     * @param pusher    producer for the target topic; closed before returning
     * @param toEntity  maps a reduced (key, value) pair to the payload entity
     */
    private <K, V, T> void drainToKafka(Iterator<Tuple2<K, V>> partition,
                                        String dataKey,
                                        KafkaPusher<T> pusher,
                                        BiFunction<K, V, T> toEntity) {
        try {
            while (partition.hasNext()) {
                Tuple2<K, V> entry = partition.next();
                pusher.push(dataKey, toEntity.apply(entry._1, entry._2));
            }
        } finally {
            pusher.close();
        }
    }
}
