package team.bluepen.supermarket.service.calc

import org.apache.spark.SparkContext
import org.apache.spark.sql.functions._
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.springframework.stereotype.Service
import team.bluepen.supermarket.conf.{KafkaProperties, SparkProperties}
import team.bluepen.supermarket.kafka.KafkaTopics

import scala.collection.immutable.HashMap

/**
 * @author RollW
 */
@Service
class ProductSQLCountService(private val sparkProperties: SparkProperties,
                             private val kafkaProperties: KafkaProperties) {
    // Spark needs a resolvable HADOOP_HOME (winutils on Windows) before the
    // session is created, so set it first.
    System.setProperty("hadoop.home.dir", sparkProperties.getHadoopHome)

    private[this] val sparkSession: SparkSession = SparkSession.builder
      .appName("ProductService")
      // Checkpoint location is mandatory for a Kafka sink; it enables
      // recovery of the streaming query after restarts.
      .config("spark.sql.streaming.checkpointLocation", ".sql_checkpoint")
      .master(sparkProperties.getMaster)
      .getOrCreate

    // Start the streaming pipeline eagerly when Spring constructs this bean.
    openClient()

    /** Exposes the SparkContext of the shared session for other services. */
    def getSparkContext: SparkContext = {
        sparkSession.sparkContext
    }

    /**
     * Builds and starts the streaming query:
     * reads tab-separated product records from Kafka PRODUCT_TOPIC,
     * maintains a running sum of `amount` per (id, state), and publishes
     * each update as a JSON record to SQL_STATE_DATA_TOPIC every 5 seconds.
     */
    private def openClient(): Unit = {
        // Schema of one CSV record on the product topic.
        val structType: StructType = new StructType()
          .add("userId", LongType)
          .add("id", LongType)
          .add("name", StringType)
          .add("price", IntegerType)
          .add("amount", IntegerType)
          .add("state", BooleanType)
        // Records are tab-separated, not comma-separated.
        val config: Map[String, String] = HashMap(
            "sep" -> "\t"
        )

        val dataset: Dataset[Row] = sparkSession.readStream.format("kafka")
          .option("kafka.bootstrap.servers", kafkaProperties.getBootstrapServers)
          .option("subscribe", KafkaTopics.PRODUCT_TOPIC)
          .load
          // Kafka delivers the payload as binary; decode it to a string
          // before CSV parsing. (The record timestamp is not needed: it
          // would be dropped by the groupBy below anyway.)
          .selectExpr("CAST(value AS STRING)")
          .select(from_csv(col("value"), structType, config).as("product"))
          .groupBy(
              col("product.id"),
              col("product.state"))
          // Alias the aggregate explicitly instead of selecting the
          // auto-generated `sum(product.amount AS amount)` column name,
          // which is an internal artifact that varies across Spark versions.
          .agg(sum(col("product.amount")).as("amount"))
          .select(
              to_json(struct(
                  col("id"),
                  col("state"),
                  col("amount")
              )).as("value")
          )

        // The query runs on Spark's own threads; we deliberately do NOT call
        // awaitTermination() here, as that would block the Spring container
        // during bean construction.
        dataset.writeStream
          .format("kafka")
          .outputMode(OutputMode.Update())
          .trigger(Trigger.ProcessingTime("5 seconds"))
          .option("kafka.bootstrap.servers", kafkaProperties.getBootstrapServers)
          .option("topic", KafkaTopics.SQL_STATE_DATA_TOPIC)
          .start()
    }

    /** Stops all streaming queries and releases the underlying Spark session. */
    def closeClient(): Unit = {
        sparkSession.close()
    }
}
