package org.apache.spark.sql.execution.datasources.rest

import java.sql.ResultSet

import org.apache.spark.executor.InputMetrics
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.SpecificInternalRow
import org.apache.spark.sql.types.{DataType, IntegerType, LongType, Metadata, StringType, StructType}
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.NextIterator

import scala.collection.mutable

/**
 * Sample domain record whose fields mirror the columns of `RestUtil.datas`
 * (name, age, address, userid).
 *
 * NOTE(review): not referenced anywhere in this file — presumably consumed by
 * callers (e.g. for Encoder/schema inference); confirm before removing.
 */
case class Person(name: String, age: Int, address: String, userid: Long)
/**
 * In-memory stand-in for a REST data source: exposes a fixed sample dataset and
 * builds Spark row iterators over it, converting column values according to a
 * supplied [[StructType]] schema. Only `IntegerType`, `LongType` and
 * `StringType` columns are supported.
 */
object RestUtil {
  // Sample response rows keyed by column name. Values must already carry the
  // JVM type matching the schema (Int / Long / String) — getters cast, not parse.
  val datas = List[Map[String, Any]](
    Map(("name", "zhangsan"), ("age", 24), ("address", "beijing"), ("userid", 1L)),
    Map(("name", "lisi"), ("age", 26), ("address", "shanghai"), ("userid", 2L)),
    Map(("name", "wangwu"), ("age", 22), ("address", "hangzhou"), ("userid", 3L))
  )
  // Same sample rows in positional (name, age, address) form.
  // NOTE(review): unused in this file — presumably consumed by callers; verify.
  val dataList = List(List("zhangsan", 24, "beijing"), List("lisi", 26, "shanghai"), List("wangwu", 22, "hangzhou"))

  /**
   * Returns the sample data as external [[Row]]s projected onto `schema`.
   *
   * Each consumed record increments `inputMetrics` by one. Columns are looked up
   * in the source map by field name and cast per the field's data type.
   *
   * @param schema       columns to project; only Integer/Long/String types are supported
   * @param inputMetrics metrics sink updated once per record read
   * @throws IllegalArgumentException if a schema field has an unsupported data type
   */
  def queryData1(schema: StructType, inputMetrics: InputMetrics): Iterator[Row] = {
    new NextIterator[Row] {
      private val data = datas.iterator

      override protected def getNext(): Row = {
        if (data.hasNext) {
          inputMetrics.incRecordsRead(1)
          val record = data.next()
          Row.fromSeq(schema.fields.map { field =>
            val colData = record(field.name)
            field.dataType match {
              case IntegerType => colData.asInstanceOf[Int]
              case LongType => colData.asInstanceOf[Long]
              case StringType => colData.asInstanceOf[String]
              // Consistent with makeGetter: fail fast with a descriptive message.
              case other => throw new IllegalArgumentException(s"Unsupported type ${other.catalogString}")
            }
          }.toSeq)
        } else {
          finished = true
          null.asInstanceOf[Row]  // NextIterator contract: null + finished signals exhaustion
        }
      }

      override protected def close(): Unit = {
        println("execute close")
      }
    }
  }

  // Writes one named column from a source record into an InternalRow slot:
  // (sourceRecord, columnName, targetRow, targetPosition) => Unit
  private type RestValueGetter = (Map[String, Any], String, InternalRow, Int) => Unit

  /** Builds one value getter per schema field, in field order. */
  private def makeGetter(schema: StructType): Array[RestValueGetter] =
    schema.fields.map(sf => makeGetter(sf.dataType, sf.metadata))

  /**
   * Returns a getter that casts the named map value to `dt`'s JVM type and
   * writes it into the target row. Strings are null-safe (null becomes "");
   * Int/Long getters assume the key is present and correctly typed.
   *
   * @param metadata field metadata (currently unused; kept for interface stability)
   * @throws IllegalArgumentException for any type other than Integer/Long/String
   */
  private def makeGetter(dt: DataType, metadata: Metadata): RestValueGetter = dt match {
    case IntegerType => (rs: Map[String, Any], name: String, row: InternalRow, pos: Int) =>
      row.setInt(pos, rs(name).asInstanceOf[Int])

    case LongType => (rs: Map[String, Any], name: String, row: InternalRow, pos: Int) =>
      row.setLong(pos, rs(name).asInstanceOf[Long])

    case StringType => (rs: Map[String, Any], name: String, row: InternalRow, pos: Int) =>
      row.update(pos, UTF8String.fromString(if (rs(name) == null) "" else rs(name).toString))

    case _ => throw new IllegalArgumentException(s"Unsupported type ${dt.catalogString}")
  }

  /**
   * Returns the sample data as Catalyst [[InternalRow]]s projected onto `schema`.
   *
   * WARNING: the same mutable [[SpecificInternalRow]] instance is returned on
   * every call to `next()` (standard Spark reader pattern) — consumers that
   * buffer rows must copy them first.
   *
   * @param schema       columns to project; only Integer/Long/String types are supported
   * @param inputMetrics metrics sink updated once per record read
   */
  def queryData(schema: StructType, inputMetrics: InputMetrics): Iterator[InternalRow] = {
    new NextIterator[InternalRow] {
      private val data = datas.iterator
      private val getters: Array[RestValueGetter] = makeGetter(schema)
      private val mutableRow = new SpecificInternalRow(schema.fields.map(_.dataType))

      override protected def getNext(): InternalRow = {
        if (data.hasNext) {
          inputMetrics.incRecordsRead(1)
          val record = data.next()
          // Fill each slot via its precomputed getter; zipWithIndex replaces
          // the original external `var pos` counter.
          schema.fields.zipWithIndex.foreach { case (field, pos) =>
            getters(pos)(record, field.name, mutableRow, pos)
          }
          mutableRow
        } else {
          finished = true
          null.asInstanceOf[InternalRow]  // NextIterator contract: null + finished signals exhaustion
        }
      }

      override protected def close(): Unit = {
        println("execute close")
      }
    }
  }
}
