package spark.sql

import org.apache.spark.sql.Row

object RowTest {

  /**
   * Demonstrates the [[org.apache.spark.sql.Row]] API: positional and typed
   * field access, construction from tuples, merging, structural equality,
   * extractor-based pattern matching, and building an encoder from an
   * explicit schema.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val row = Row(1, "hello")
    // getAs[T] performs the cast of the underlying Any for us.
    println(row.getAs[Int](0))
    println(row.get(0))
    println(row.getAs[String](1))

    // Rows can be built directly from tuples; print the result instead of
    // discarding it (the original dropped this value on the floor).
    val fromTuple = Row.fromTuple((0, "hello"))
    println(fromTuple)

    // merge concatenates the fields of its argument rows into one row.
    val row2 = Row.merge(Row(1), Row("hello"))
    println(row2)

    // Row equality is structural; Row.empty is the zero-field row.
    // Print the comparison rather than silently discarding it.
    println(Row.empty == Row())

    // Rows support extractor-based pattern matching; keep and show the result.
    val pair = Row(1, "hello") match {
      case Row(key: Int, value: String) => key -> value
    }
    println(pair)

    import org.apache.spark.sql.types._
    val schema = StructType(
      StructField("id", LongType, nullable = false) ::
        StructField("name", StringType, nullable = false) :: Nil)

    // NOTE(review): RowEncoder lives under org.apache.spark.sql.catalyst,
    // which is Spark-internal API and may change between minor versions —
    // confirm this is intentional for a demo.
    import org.apache.spark.sql.catalyst.encoders.RowEncoder
    val encoder = RowEncoder(schema)
    println(encoder)
  }

}
