package com.geomesa.spark.SparkCore

import org.apache.spark.sql.SparkSession

/**
 * Small exploratory driver: reads a GeoJSON-style JSON file (`linepp.txt`)
 * into a DataFrame and projects the nested `geometry` / `coordinates`
 * fields out of the `features` array, printing the result to stdout.
 */
object SchemaTest {
  def main(args: Array[String]): Unit = {
    // Brings `withJTS` into scope (registers JTS geometry UDTs/UDFs on the session).
    import org.locationtech.geomesa.spark.jts._

    val spark: SparkSession =
      SparkSession.builder()
        .appName("test")
        .master("local[*]")
        .getOrCreate()
        // Must inject the spark.jts._ package (JTS types) into the session.
        .withJTS

    try {
      // Resolve the test file from the classpath; fail fast with a clear
      // message instead of a bare NPE when the resource is missing.
      val dataFile = Option(this.getClass.getClassLoader.getResource("linepp.txt"))
        .map(_.getPath)
        .getOrElse(sys.error("resource 'linepp.txt' not found on classpath"))

      val df = spark.read.json(dataFile)

      /* Inferred schema of the input (for reference):
         root
          |-- crs: struct (nullable = true)
          |    |-- properties: struct (nullable = true)
          |    |    |-- name: string (nullable = true)
          |    |-- type: string (nullable = true)
          |-- features: array (nullable = true)
          |    |-- element: struct (containsNull = true)
          |    |    |-- geometry: struct (nullable = true)
          |    |    |    |-- coordinates: array (nullable = true)
          |    |    |    |    |-- element: array (containsNull = true)
          |    |    |    |    |    |-- element: array (containsNull = true)
          |    |    |    |    |    |    |-- element: double (containsNull = true)
          |    |    |    |-- type: string (nullable = true)
          |    |    |-- geometry_name: string (nullable = true)
          |    |    |-- id: string (nullable = true) */

      import spark.implicits._
      // NOTE: the original code also did `.withColumn("crs", $"crs")` and
      // `.withColumn("features", $"features")` — both are no-ops (replacing a
      // column with itself) and have been dropped.
      // `features` is an array of structs, so `$"features.geometry"` yields an
      // array of geometry structs (one element per feature).
      val newDF = df
        .withColumn("geometry", $"features.geometry")
        // `geometry` refers to the column added on the previous line.
        .withColumn("coordinates", $"geometry.coordinates")

      // Original bound `newDF` to the Unit result of `.show()`; keep the
      // DataFrame and show it separately instead.
      newDF.show()
    } finally {
      // Always release local Spark resources, even if the job above fails.
      spark.stop()
    }
  }
}
