package com.geomesa.spark.SparkCore

import org.apache.spark.sql.SparkSession


/**
  * Query nested GeoJSON-style data directly with DataFrame `select`,
  * without registering any UDFs.
  */
object BySelectFunction {

  /**
    * Entry point: reads a GeoJSON-style JSON resource into a DataFrame and
    * flattens the nested `geometry` structure using plain column selection
    * (no UDF registration required).
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Enables the JTS spatial types/encoders on the session via `.withJTS`.
    import org.locationtech.geomesa.spark.jts._

    val spark: SparkSession = SparkSession.builder()
      .appName("BySelectFunction")
      .master("local[2]")
      .getOrCreate()
      .withJTS

    try {
      // getResource returns null when the file is absent from the classpath;
      // fail fast with a clear message instead of an NPE on `.getPath`.
      val resourceUrl = Option(this.getClass.getClassLoader.getResource("lineline.txt"))
        .getOrElse(throw new IllegalArgumentException(
          "Classpath resource 'lineline.txt' not found"))

      val df = spark.read.json(resourceUrl.getPath)
      import spark.implicits._

      // Flatten the nested geometry fields via select — the point of this demo.
      val frame = df.select($"type", $"id", $"geometry.type", $"geometry.coordinates")
      frame.show(5)

      // Coordinate payload shape: array<array<array<double>>> (a list of line strings).
      val coordinates = df.select($"geometry.coordinates")
      coordinates.show(5)
    } finally {
      // Always release the local Spark context, even when the job fails.
      spark.stop()
    }
  }
}
