package team.bluepen.supermarket.service.calc;

import org.apache.spark.api.java.JavaSparkContext;
import org.springframework.stereotype.Service;
import scala.Tuple2;
import team.bluepen.supermarket.conf.KafkaProperties;
import team.bluepen.supermarket.data.entity.Product;
import team.bluepen.supermarket.data.entity.ProductIdNamesAmount;
import team.bluepen.supermarket.kafka.KafkaPusher;
import team.bluepen.supermarket.kafka.KafkaReceiveCallback;
import team.bluepen.supermarket.kafka.KafkaReceiver;
import team.bluepen.supermarket.kafka.KafkaTopics;
import team.bluepen.supermarket.service.DataKeys;
import team.bluepen.supermarket.service.ProductRecvService;

import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Periodically aggregates received {@link Product}s with a Spark RDD pipeline
 * (group by product id, merge via {@link ProductIdNamesAmount#add}) and pushes
 * each aggregated state to the Kafka state topic. It also subscribes to that
 * same topic and mirrors the latest state per product id into an in-memory map
 * so the counts can be queried locally.
 *
 * <p>{@link Serializable} is required because the Spark {@code foreach} closure
 * may be shipped to executors; every collaborator that must not travel with it
 * is marked {@code transient}.
 *
 * @author RollW
 */
@Service
public class ProductRddCountService implements KafkaReceiveCallback<ProductIdNamesAmount>, Serializable {
    private static final Logger logger =
            Logger.getLogger(ProductRddCountService.class.getName());

    // Written by the Kafka receiver thread (onReceive) and read by the public
    // getters from other threads -> must be a concurrent map. Transient so it
    // is never serialized into a Spark closure.
    private final transient Map<Long, ProductIdNamesAmount> amountMap = new ConcurrentHashMap<>();

    private final KafkaProperties kafkaProperties;
    private final transient JavaSparkContext sparkContext;
    private final transient ProductRecvService productRecvService;
    private final transient KafkaReceiver<ProductIdNamesAmount> amountKafkaReceiver;
    private final transient ScheduledExecutorService scheduledExecutorService =
            Executors.newSingleThreadScheduledExecutor();

    public ProductRddCountService(KafkaProperties kafkaProperties,
                                  ProductSQLCountService sqlCountService,
                                  ProductRecvService productRecvService) {
        this.kafkaProperties = kafkaProperties;
        this.productRecvService = productRecvService;
        // Reuse the shared SparkContext owned by the SQL count service.
        sparkContext = new JavaSparkContext(sqlCountService.getSparkContext());
        // NOTE(review): scheduling and the receiver below let `this` escape
        // before construction completes; kept for compatibility with existing
        // startup behavior, but worth moving to a @PostConstruct hook.
        start();
        this.amountKafkaReceiver = new KafkaReceiver<>(
                KafkaTopics.STATE_DATA_TOPIC,
                kafkaProperties.getBootstrapServers(),
                "ProductRddCountServ",
                DataKeys.PRODUCT_ID_NAME_STATE_COUNT,
                ProductIdNamesAmount.deserializer(),
                this);
        amountKafkaReceiver.start();
    }

    /**
     * Starts the 5-second aggregation loop. Invoked once from the
     * constructor; calling it again schedules an additional concurrent loop.
     */
    public void start() {
        scheduledExecutorService.scheduleAtFixedRate(
                this::execute,
                0,
                5,
                TimeUnit.SECONDS);
    }

    /**
     * One aggregation round: pull the received products, reduce them by
     * product id on the Spark cluster, and push each merged amount to Kafka.
     * Exceptions are logged (not rethrown) so the scheduled task keeps running.
     */
    private void execute() {
        try {
            List<Product> products = productRecvService.getReceived();
            if (products.isEmpty()) {
                return;
            }
            // Copy into a local so the foreach closure captures only this
            // String instead of `this` (avoids serializing the whole service
            // to the executors).
            String bootstrapServers = kafkaProperties.getBootstrapServers();
            sparkContext.parallelize(products)
                    .mapToPair(product -> new Tuple2<>(product.getId(),
                            new ProductIdNamesAmount(product.getId()).put(product)))
                    .reduceByKey(ProductIdNamesAmount::add)
                    .values()
                    .foreach(productIdNamesAmount -> {
                        KafkaPusher<ProductIdNamesAmount> pusher = new KafkaPusher<>(
                                KafkaTopics.STATE_DATA_TOPIC,
                                bootstrapServers,
                                ProductIdNamesAmount.serializer());
                        try {
                            pusher.push(DataKeys.PRODUCT_ID_NAME_STATE_COUNT, productIdNamesAmount);
                        } finally {
                            // Close even when push fails, so the producer
                            // does not leak on the executor.
                            pusher.close();
                        }
                    });
        } catch (Exception e) {
            logger.log(Level.WARNING, "Product RDD count round failed", e);
        }
    }

    /**
     * Returns a read-only live view of the latest per-product amounts
     * received from the state topic.
     *
     * @return unmodifiable view over the current amounts
     */
    public Collection<ProductIdNamesAmount> getAmountCollection() {
        // Unmodifiable wrapper: callers must not mutate the internal map.
        return Collections.unmodifiableCollection(amountMap.values());
    }

    /**
     * Returns the latest amount state for the given product id.
     *
     * @param id product id
     * @return the latest state, or {@code null} if none has been received yet
     */
    public ProductIdNamesAmount getById(long id) {
        return amountMap.get(id);
    }

    /**
     * Stops the aggregation loop and releases the Spark context.
     */
    public void shutdown() {
        // Stop scheduling new rounds BEFORE closing the context they use,
        // otherwise a late execute() may run against a closed SparkContext.
        scheduledExecutorService.shutdown();
        sparkContext.close();
        // NOTE(review): amountKafkaReceiver is started in the constructor but
        // never stopped here — confirm whether KafkaReceiver exposes a
        // stop/close that should be called on shutdown.
    }

    /**
     * Kafka callback: record the latest aggregated state for its product id.
     */
    @Override
    public void onReceive(ProductIdNamesAmount data) {
        amountMap.put(data.getId(), data);
    }
}
