package com.geomesa.spark.SparkCore

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{ArrayType, StringType, StructField, StructType}
import org.json.simple.parser.JSONParser
import org.locationtech.jts.geom._

object DistanceCal {
  /**
   * Demo entry point: reads a GeoJSON-like file from the classpath with Spark,
   * round-trips the first row back to GeoJSON, and flattens the nested
   * `features.geometry.coordinates` structure into top-level columns.
   */
  def main(args: Array[String]): Unit = {
    import org.locationtech.geomesa.spark.jts._

    // Local SparkSession; withJTS injects the GeoMesa spark.jts._ UDTs/UDFs,
    // which must be registered before geometry columns can be used.
    val spark: SparkSession =
      SparkSession.builder()
        .appName("test")
        .master("local[*]")
        .getOrCreate()
        .withJTS

    try {
      // Expected layout of the input document. Currently unused because the
      // schema is inferred by spark.read.json below; kept for reference so it
      // can be re-enabled via .schema(schema).
      val schema = StructType(Seq(
        StructField("crs", StringType),
        StructField("features", ArrayType(
          StructType(Seq(StructField("geometry",
            StructType(Seq(StructField("coordinates", StringType)
            )))))))
      ))

      // Input file must be on the classpath (e.g. src/main/resources/linepp.txt).
      // NOTE(review): getResource returns null if the file is missing — this
      // would then fail with an NPE; confirm the resource is always packaged.
      val dataFile = this.getClass.getClassLoader.getResource("linepp.txt").getPath
      val df = spark.read
        //.schema(schema)
        .json(dataFile)

      import spark.implicits._
      import org.locationtech.geomesa.spark.jts.util.GeoJSONExtensions._

      // Serialize the first row back to GeoJSON and strip all whitespace.
      val res = df.toGeoJSON.head().replaceAll("\\s+", "")
      println(res)

      // Flatten the nested GeoJSON structure into top-level columns.
      // (The original no-op withColumn("features", $"features") was removed.)
      val newDF = df
        .withColumn("geometry", $"features.geometry")
        .withColumn("coordinates", $"geometry.coordinates")

    } finally {
      // Always release local Spark resources, even if the job above fails.
      spark.stop()
    }
  }
}
