package com.example

import org.apache.sedona.spark.SedonaContext
import org.apache.spark.sql.SparkSession
import org.locationtech.jts.geom.{Coordinate, GeometryFactory}

/**
 * Smoke-test entry point: initializes Sedona on a local Spark session, reads the
 * land-area polygon table from PostGIS over JDBC, and counts the rows whose
 * geometry intersects a fixed bounding box.
 *
 * Side effects: opens a Spark session and a JDBC connection; prints progress to stdout.
 */
object TestSql {
  def main(args: Array[String]): Unit = {

    println("Hello world!")
    val session = SparkSession.builder()
      .appName("ManualGridSize")
      .master("local[*]")
      // Raise the collected-result cap; a full-table collect previously failed with:
      // "Total size of serialized results of 1 tasks (1917.5 MiB) is bigger than spark.driver.maxResultSize (1024.0 MiB)"
      .config("spark.driver.maxResultSize", "2g")
      // NOTE(review): setting spark.driver.memory here has NO effect in local mode —
      // the driver JVM is already running when this config is applied. To actually
      // raise the heap, pass --driver-memory on spark-submit or -Xmx in the IDE run
      // configuration. Kept for documentation of intent only.
      .config("spark.driver.memory", "2g")
      // Workaround for running inside the IDE with a small default heap:
      // "System memory 259522560 must be at least 471859200. Please increase heap size
      //  using the --driver-memory option or spark.driver.memory in Spark configuration."
      .config("spark.testing.memory", "471859200")
      // Kryo + Sedona registrator are required for efficient geometry serialization.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.kryo.registrator", "org.apache.sedona.core.serde.SedonaKryoRegistrator")
      .config("spark.scheduler.listenerbus.eventqueue.threads", "4")
      .getOrCreate()
    session.sparkContext.setLogLevel("ERROR")

    try {
      // Register Sedona's spatial SQL functions (ST_GeomFromWKB, ST_Intersects, ...).
      val spark = SedonaContext.create(session)
      println("[********]Sedona initialized successfully![********]")
      // [1] The region to be clipped is stored in a Postgres database; read it first.
      //     There are two areas: the region's boundary polygon and the land boundary polygon.
      val jdbcUrl = "jdbc:postgresql://192.168.20.53:5432/seamap"
      val connectionProperties = new java.util.Properties()
      // SECURITY(review): credentials are hardcoded in source — move to environment
      // variables or a secrets store before this leaves a test environment.
      connectionProperties.put("user", "gis_read")
      connectionProperties.put("password", "isd5Wp3h2sqQ")
      connectionProperties.put("driver", "org.postgresql.Driver")

      // Axis-aligned bounding box (lon/lat, EPSG:4326) used as the intersection filter.
      val (minX, maxX, minY, maxY) = (100.0, 140.0, -6.366, 23.634)
      val coordinates = Array(
        new Coordinate(minX, minY),
        new Coordinate(maxX, minY),
        new Coordinate(maxX, maxY),
        new Coordinate(minX, maxY),
        new Coordinate(minX, minY) // close the ring
      )
      val geometryFactory = new GeometryFactory()
      val wkt = geometryFactory.createPolygon(coordinates).toString
      // Fix: the original passed a (String, String) tuple to println, printing
      // "(rect we need is: ,POLYGON ...)" — use interpolation for a plain message.
      println(s"rect we need is: $wkt")

      // NOTE: ST_* predicates are not pushed down to Postgres via plain JDBC; the
      // whole table is fetched and filtered in Spark.
      val landDF = spark.read
        .jdbc(jdbcUrl, "datagis.t_lndare_r", connectionProperties)
        .selectExpr("ST_GeomFromWKB(geom) AS geom")
        .where(s"ST_Intersects(geom, ST_GeomFromText('$wkt', 4326))")
      println("[****] t_lndare_t table [****]")
      landDF.show(1)
      val intersectingCount = landDF.count()
      println("[****] t_lndare_t table counts after ST_Intersects [****]")
      println(intersectingCount)
    }
    finally {
      // Always release the Spark session, even if Sedona init or the query fails.
      session.stop()
    }
  }
}
