package org.apache.spark.streaming.rabbitmq

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.sql.execution.streaming.{Offset, Source}
import org.apache.spark.sql.types.{StructField, StructType}
import org.apache.spark.sql.types._

import scala.collection.JavaConverters._

/**
 * Structured Streaming source stub for consuming RabbitMQ (AMQP) messages.
 *
 * <pre>
 * Created by zhenqin.
 * User: zhenqin
 * Date: 2020/12/30
 * Time: 9:47 PM
 * Vendor: yiidata.com
 * </pre>
 *
 * @author zhenqin
 */
class AmqpSource(
                  sqlContext: SQLContext,
                  // `override val` satisfies Source's paren-less `def schema: StructType`
                  // (a val may override a def). The previous
                  // `override def schema(): StructType = this.schema` did not compile:
                  // `schema()` overrides nothing on Source, and its body resolved to the
                  // method itself (infinite recursion) rather than this constructor parameter.
                  override val schema: StructType,
                  sourceOptions: Map[String, String],
                  metadataPath: String
                ) extends Source with Logging {

  /**
   * Latest offset available from the AMQP queue.
   * Not yet implemented — fails fast with NotImplementedError.
   */
  override def getOffset: Option[Offset] = ???

  /**
   * Returns the batch of rows between `start` (exclusive) and `end` (inclusive).
   * Not yet implemented — fails fast with NotImplementedError instead of the
   * previous `return null`, which would surface later as an opaque NPE inside
   * the streaming engine.
   *
   * @param start offset of the previously committed batch, None for the first batch
   * @param end   upper bound (inclusive) for this batch
   */
  override def getBatch(start: Option[Offset], end: Offset): DataFrame = ???

  /**
   * Releases resources held by this source.
   * Nothing to release yet; close the RabbitMQ connection here once
   * the consumer is implemented.
   */
  override def stop(): Unit = {
    // no-op: no connection is opened by this stub
  }

}

object AmqpSource {

  /**
   * Fixed output schema for AMQP message records: routing metadata columns
   * plus the message payload. All fields use the default nullability.
   */
  def amqpSchema: StructType = {
    val columns = Seq(
      "consumer_tag"      -> StringType,
      "exchange"          -> StringType,
      "routing_key"       -> StringType,
      "queue_name"        -> StringType,
      "current_timestamp" -> LongType,
      "message"           -> StringType
    )
    StructType(columns.map { case (name, dataType) => StructField(name, dataType) })
  }
}
