package com.suddev.bigdata.core

import com.vividsolutions.jts.geom.{Coordinate, GeometryFactory, Point}
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.{SparkConf, SparkContext}
import org.datasyslab.geospark.formatMapper.{GeoJsonReader, WktReader}
import org.datasyslab.geospark.serde.GeoSparkKryoRegistrator
import org.datasyslab.geospark.spatialRDD.PointRDD
import com.suddev.bigdata.utils.ImplicitAspect._

/**
  * Demo entry point for GeoSpark: builds a `PointRDD` from a handful of
  * in-memory (longitude, latitude, label) triples, then loads a GeoJSON
  * file into a spatial RDD and prints its raw contents.
  *
  * @author Rand
  * @date 2020/2/11 0011
  */
object CreateApp {
  def main(args: Array[String]): Unit = {
    // Kryo + the GeoSpark registrator are required so JTS geometry objects
    // serialize efficiently across the cluster.
    val conf = new SparkConf().
      setAppName("GeoSparkDemo1").
      setMaster("local[*]").
      set("spark.serializer", classOf[KryoSerializer].getName).
      set("spark.kryo.registrator", classOf[GeoSparkKryoRegistrator].getName)
    val sc = new SparkContext(conf)

    try {
      // Sample POIs as (longitude, latitude, userData) triples.
      val data = Array(
        (-88.331492, 32.324142, "hotel"),
        (-88.175933, 32.360763, "gas"),
        (-88.388954, 32.357073, "bar"),
        (-88.221102, 32.35078, "restaurant")
      )
      // NOTE(review): geometryFactory is captured by the closure below and
      // shipped to executors — presumably serializable; confirm with JTS docs.
      val geometryFactory = new GeometryFactory()
      val pointsRowSpatialRDD = sc.parallelize(data)
        .map { case (lon, lat, label) =>
          val point = geometryFactory.createPoint(new Coordinate(lon, lat))
          point.setUserData(label)
          point
        }
      // Wrap the raw RDD[Point] in GeoSpark's PointRDD (demo only; not used further).
      val pointRDD = new PointRDD(pointsRowSpatialRDD)

      // Alternative input path: read WKT rows from a TSV file.
      // val inputLocation = "data/checkin.tsv"
      // val wktColumn = 0 // The WKT string starts from Column 0
      // val allowTopologyInvalidGeometries = true
      // val skipSyntaxInvalidGeometries = false
      // val spatialRDD = WktReader.readToGeometryRDD(sc, inputLocation, wktColumn, allowTopologyInvalidGeometries, skipSyntaxInvalidGeometries)

      // Read a GeoJSON file: keep topologically invalid geometries,
      // and do not skip syntactically invalid ones.
      val inputLocation = "data/polygon.json"
      val allowTopologyInvalidGeometries = true
      val skipSyntaxInvalidGeometries = false
      val spatialRDD = GeoJsonReader.readToGeometryRDD(sc, inputLocation, allowTopologyInvalidGeometries, skipSyntaxInvalidGeometries)
      spatialRDD.rawSpatialRDD.rdd.printInfo()
    } finally {
      // BUG FIX: the original never stopped the SparkContext, so the local
      // cluster resources (UI port, executor threads) leaked on exit and on
      // any exception thrown during the job.
      sc.stop()
    }
  }
}
