package com.suddev.bigdata.sql

import com.suddev.bigdata.utils.ContextUtils
import com.vividsolutions.jts.geom.Geometry
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.{Row, SparkSession}
import org.datasyslab.geospark.serde.GeoSparkKryoRegistrator
import org.datasyslab.geospark.spatialRDD.SpatialRDD
import org.datasyslab.geosparksql.utils.{Adapter, GeoSparkSQLRegistrator}

/**
  * Minimal GeoSpark SQL demo: reads checkin coordinates from a CSV file,
  * builds `ST_Point` geometries via Spark SQL, and prints each point's
  * coordinate to stdout.
  *
  * Expects `data/checkin.csv` with x/y coordinates in columns `_c0`/`_c1`.
  *
  * @author Rand
  * @date 2020/2/9 0009
  */
object DemoApp {
  def main(args: Array[String]): Unit = {
    // Kryo plus the GeoSpark registrator are required so JTS Geometry
    // values serialize correctly between Spark tasks.
    val sparkSession = SparkSession.builder()
      .config("spark.serializer", classOf[KryoSerializer].getName)
      .config("spark.kryo.registrator", classOf[GeoSparkKryoRegistrator].getName)
      .master("local[*]")
      .appName("myGeoSparkSQLdemo")
      .getOrCreate()
    // Important! Registers the ST_* SQL functions (ST_Point, ...) on this session.
    GeoSparkSQLRegistrator.registerAll(sparkSession)
    import sparkSession.implicits._

    try {
      val df = sparkSession.read.csv("data/checkin.csv")
      df.createOrReplaceTempView("inputtable")
      // `val`, not `var`: the DataFrame reference is never reassigned.
      val spatialDf = sparkSession.sql(
        """
          |SELECT ST_Point(CAST(inputtable._c0 AS Decimal(24,20)),CAST(inputtable._c1 AS Decimal(24,20))) AS checkin
          |FROM inputtable
        """.stripMargin)
      spatialDf.map {
        case Row(checkin: Geometry) =>
          checkin.getCoordinate.toString
      }.show()
      // Example: expose the result as a SpatialRDD for RDD-based spatial ops.
      // val spatialRDD = new SpatialRDD[Geometry]
      // spatialRDD.rawSpatialRDD = Adapter.toRdd(spatialDf)
    } finally {
      // Always release the session (and its local[*] cluster resources),
      // even if reading or querying throws.
      sparkSession.stop()
    }
  }
}
