package org.apache.spark.streaming.rabbitmq

import java.util.{Locale, Optional}

import lombok.extern.slf4j.Slf4j
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.sources.v2.reader.streaming.{ContinuousReader, MicroBatchReader}
import org.apache.spark.sql.sources.v2.{ContinuousReadSupport, DataSourceOptions, DataSourceV2, MicroBatchReadSupport}
import org.apache.spark.sql.sources.{DataSourceRegister, StreamSourceProvider}
import org.apache.spark.sql.types.StructType

import scala.collection.JavaConverters._

/**
 *
 * AMQP Spark Structured Streaming source provider.
 *
 *
 * <pre>
 *
 * Created by zhenqin.
 * User: zhenqin
 * Date: 2020/12/29
 * Time: 2:24 PM
 * Vendor: yiidata.com
 *
 * </pre>
 *
 * @author zhenqin
 */
@Slf4j
class AmqpSourceProvider extends DataSourceV2
  with StreamSourceProvider
  with DataSourceRegister
  with MicroBatchReadSupport
  with ContinuousReadSupport
  with Logging {


  /**
   * Default schema, used whenever the caller does not supply a custom one.
   */
  var fields: StructType = AmqpSource.amqpSchema

  /**
   * Short name registered for this source, i.e. `format("amqp")`.
   *
   * @return the data source alias
   */
  override def shortName(): String = "amqp"


  /**
   * Resolves the schema for the legacy (V1) streaming source.
   *
   * Falls back to the fixed AMQP schema when no custom schema is supplied,
   * consistent with [[createSource]] below.
   *
   * @param sqlContext   active SQL context
   * @param schema       optional user-specified schema
   * @param providerName provider name Spark resolved this source under
   * @param parameters   raw (case-insensitive) source options
   * @return the source short name paired with the resolved schema
   */
  override def sourceSchema(sqlContext: SQLContext,
                            schema: Option[StructType],
                            providerName: String,
                            parameters: Map[String, String]): (String, StructType) = {
    validateStreamOptions(parameters)
    logInfo(parameters.toString())
    // Previously this required schema.isDefined with a message claiming the
    // opposite ("has a fixed schema and cannot be set with a custom one").
    // Resolve the same way createSource does: user schema wins, else default.
    (shortName(), schema.getOrElse(fields))
  }


  /**
   * Extracts the options addressed to this source: keys starting with the
   * "amqp." prefix (matched case-insensitively), with the 5-character prefix
   * stripped from the key. Values are passed through untouched.
   *
   * @param params raw source options
   * @return the de-prefixed amqp.* options
   */
  private def extractAmqpParams(params: Map[String, String]): Map[String, String] =
    params.keySet
      .filter(_.toLowerCase(Locale.ROOT).startsWith("amqp."))
      .map { k => k.drop(5) -> params(k) }
      .toMap

  /**
   * Validates the stream-specific "amqp."-prefixed options.
   *
   * @param caseInsensitiveParams raw source options
   */
  private def validateStreamOptions(caseInsensitiveParams: Map[String, String]) = {
    // Stream specific options
    validateGeneralOptions(extractAmqpParams(caseInsensitiveParams))
  }

  /**
   * Checks that the mandatory options are present.
   *
   * @param parameters de-prefixed amqp.* options
   * @throws IllegalArgumentException if the queue name option is missing
   */
  private def validateGeneralOptions(parameters: Map[String, String]): Unit = {
    logInfo(parameters.toString())
    // The queue name is mandatory: without it there is nothing to consume from.
    // NOTE(review): the extracted keys keep their original case while this check
    // lowercases the expected key — confirm QueueNameKey is already lower-case.
    if (!parameters.contains(ConfigParameters.QueueNameKey.toLowerCase)) {
      throw new IllegalArgumentException(
        s"Amqp option '${ConfigParameters.QueueNameKey}' must be specified " +
          s"for the amqp source.")
    }
  }

  /**
   * Creates the legacy (V1) streaming [[Source]].
   *
   * @param sqlContext   active SQL context
   * @param metadataPath checkpoint metadata path for this source
   * @param schema       optional user-specified schema
   * @param providerName provider name Spark resolved this source under
   * @param parameters   raw source options
   * @return a new [[AmqpSource]]
   */
  override def createSource(sqlContext: SQLContext,
                            metadataPath: String,
                            schema: Option[StructType],
                            providerName: String,
                            parameters: Map[String, String]): Source = {
    // The old guard `schema.getOrElse(() => throw ...)` was dead code: it
    // returned the lambda instead of throwing, and discarded the result.
    // A missing schema is legitimate — fall back to the default.
    new AmqpSource(sqlContext, schema.getOrElse(fields), parameters, metadataPath)
  }


  /**
   * Creates the continuous-processing reader (DataSourceV2).
   *
   * @param schema             optional user-specified schema
   * @param checkpointLocation checkpoint directory (unused here)
   * @param options            case-insensitive source options
   * @return a new [[AmqpContinuousReader]]
   */
  override def createContinuousReader(schema: Optional[StructType],
                                      checkpointLocation: String,
                                      options: DataSourceOptions): ContinuousReader = {
    val specifiedParams = extractAmqpParams(options.asMap().asScala.toMap)
    new AmqpContinuousReader(schema.orElse(fields), specifiedParams)
  }

  /**
   * Creates the micro-batch reader (DataSourceV2).
   *
   * @param schema             optional user-specified schema
   * @param checkpointLocation checkpoint directory (unused here)
   * @param options            case-insensitive source options
   * @return a new [[AmqpMicroBatchReader]]
   */
  override def createMicroBatchReader(schema: Optional[StructType],
                                      checkpointLocation: String,
                                      options: DataSourceOptions): MicroBatchReader = {
    val specifiedParams = extractAmqpParams(options.asMap().asScala.toMap)
    new AmqpMicroBatchReader(schema.orElse(fields), specifiedParams)
  }
}