package org.apache.spark.sql.execution.datasources.rest

import org.apache.spark.{Partition, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.{Row, SQLContext, SparkSession}
import org.apache.spark.sql.sources.{BaseRelation, Filter, PrunedFilteredScan, PrunedScan}
import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructField, StructType}

import scala.collection.mutable.ArrayBuffer

// PrunedScan  列裁剪，不带条件过滤
// PrunedFilterScan  列裁剪，带条件过滤
/**
 * A Spark SQL relation backed by a REST data source.
 *
 * Implements [[PrunedFilteredScan]], so Spark pushes both column pruning
 * (only `requiredColumns` are materialized) and filter predicates down to
 * the underlying [[RestRDD]]. (By contrast, `PrunedScan` would provide
 * column pruning only, with no filter push-down.)
 *
 * @param sparkSession active session used to obtain the SQL context
 * @param parts        precomputed partitions handed to the scan RDD
 * @param params       data-source options (e.g. endpoint/connection settings)
 *                     forwarded verbatim to [[RestRDD]]
 */
class RestRelation(sparkSession: SparkSession, parts: Array[Partition], params: Map[String, String])
  extends BaseRelation with PrunedFilteredScan {

  override def sqlContext: SQLContext = sparkSession.sqlContext
  def sparkContext: SparkContext = sparkSession.sparkContext

  /**
   * Fixed schema exposed by this relation.
   *
   * Spark invokes `schema` many times during planning, so it is a `val`
   * (legal override of `BaseRelation.schema`): the StructType is built
   * exactly once instead of being reconstructed per call as before.
   */
  override val schema: StructType = StructType(Seq(
    StructField("name", StringType, nullable = false),
    StructField("age", IntegerType, nullable = false),
    StructField("address", StringType, nullable = false),
    StructField("userid", LongType, nullable = false)
  ))

  /**
   * Builds the scan RDD with column pruning and filter push-down applied.
   *
   * @param requiredColumns columns the query actually needs
   * @param filters         predicates Spark could push down to the source
   * @return rows produced by [[RestRDD]]
   */
  override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    // NOTE(review): the cast relies on RestRDD producing rows Spark can
    // consume as Row (the common InternalRow-as-Row data-source trick,
    // usually paired with `needConversion = false`) — confirm against
    // RestRDD's element type before changing.
    new RestRDD(sparkContext, schema, requiredColumns, parts, params, filters).asInstanceOf[RDD[Row]]
  }
}
