package com.kevin

import geotrellis.layer.stitch.TileLayoutStitcher
import geotrellis.layer.{KeyBounds, LayoutDefinition, SpatialKey, TileLayerMetadata}
import geotrellis.proj4.LatLng
import geotrellis.spark._
import geotrellis.raster._
import geotrellis.raster.density.KernelStamper
import geotrellis.raster.mapalgebra.focal._
import geotrellis.vector._

import scala.util._
import geotrellis.raster.render._
import geotrellis.raster.render.ColorRamps
import geotrellis.raster.render.ColorMap
import geotrellis.raster.io.geotiff._
import geotrellis.raster.mapalgebra.local.LocalTileBinaryOp
import geotrellis.spark.stitch._
import geotrellis.spark.tiling._
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * @author zmx
 * @create 2021-12-16 11:34
 */
/**
 * Kernel-density estimation demo: first computed locally on a single tile,
 * then tiled, then distributed with Spark over the same point set.
 */
object SparkTest {

  // Study area in LatLng degrees (xmin, ymin, xmax, ymax) — roughly Colorado.
  val extent = Extent(-109, 37, -102, 41)

  /**
   * Draws one point uniformly at random inside `extent`, attaching a
   * pseudo-random weight as the feature attribute.
   */
  def randomPointFeature(extent: Extent): PointFeature[Double] = {
    // Linear interpolation between lo and hi at a uniform random parameter.
    def lerpRandom(lo: Double, hi: Double): Double = {
      val t = Random.nextDouble
      lo * (1 - t) + hi * t
    }

    // Keep the Random call order: x, then y, then the weight.
    val x = lerpRandom(extent.xmin, extent.xmax)
    val y = lerpRandom(extent.ymin, extent.ymax)
    // nextInt % 16 lies in [-15, 15], so the weight lands in [1, 31].
    Feature(Point(x, y), Random.nextInt % 16 + 16)
  }

  // 1000 random sample points inside the study area.
  val pts = List.fill(1000)(randomPointFeature(extent))
  val kernelWidth: Int = 9
  // Gaussian kernel: standard deviation 1.5, amplitude 25.
  val kern: Kernel = Kernel.gaussian(kernelWidth, 1.5, 25)
  // Single-raster kernel density over a 700x400 grid covering the extent.
  val kde: Tile = pts.kernelDensity(kern, RasterExtent(extent, 700, 400))

  // Class breaks every 4 density units along a heatmap color ramp.
  val colorMap = ColorMap(
    (0 to kde.findMinMax._2 by 4).toArray,
    ColorRamps.HeatmapBlueToYellowToRedSpectrum
  )
  //kde.renderPng(colorMap).write("test.png")

  // GeoTiff(kde, extent, LatLng).write("test.tiff")

  // For larger data sets: subdivide the raster into a 7x4 grid of 100x100 tiles.
  val tl = TileLayout(7, 4, 100, 100)
  val ld = LayoutDefinition(extent, tl)

  // Map-space extent of a kernel stamp centered on the given point feature:
  // kwidth cells wide/high in the layout's cell units.
  def pointFeatureToExtent[D](kwidth: Double, ld: LayoutDefinition, ptf: PointFeature[D]): Extent = {
    val center = ptf.geom
    val halfW = kwidth * ld.cellwidth / 2
    val halfH = kwidth * ld.cellheight / 2
    Extent(center.x - halfW, center.y - halfH, center.x + halfW, center.y + halfH)
  }

  def ptfToExtent[D](p: PointFeature[D]) = pointFeatureToExtent(9, ld, p)

  //val aaa = ld.mapTransform(ptfToExtent(Feature(Point(-108, 38), 100.0)))

  // Expand a point into (SpatialKey, point) pairs for every layout tile its
  // kernel extent touches, keeping only keys inside the layout grid.
  def ptfToSpatialKey[D](ptf: PointFeature[D]): Iterator[(SpatialKey, PointFeature[D])] = {
    val ptextent = ptfToExtent(ptf)
    val gridBounds = ld.mapTransform(ptextent)

    for {
      (c, r) <- gridBounds.coordsIter
      // SpatialKeys are layout-grid coordinates, so bound them with
      // layoutRows/layoutCols (the previous tl.totalRows/totalCols are *pixel*
      // counts, 400/700, and never rejected anything), and drop the negative
      // keys produced when a kernel hangs off the west/south edge.
      if r >= 0 && r < tl.layoutRows
      if c >= 0 && c < tl.layoutCols
    } yield (SpatialKey(c, r), ptf)
  }

  // Points bucketed by the tile key(s) their kernel extent touches.
  val keyfeatures: Map[SpatialKey, List[PointFeature[Double]]] =
    pts
      .flatMap(ptfToSpatialKey)
      .groupBy { case (sk, _) => sk }
      .map { case (sk, pairs) => sk -> pairs.map(_._2) }
  //println(keyfeatures)
  // Each sub-tile is generated the same way as the single-raster density above.
  val keytiles = keyfeatures.map { case (sk, pfs) =>
    val (cols, rows) = tl.tileDimensions
    sk -> pfs.kernelDensity(kern, RasterExtent(ld.mapTransform(sk), cols, rows))
  }
  //println(keytiles)
  // Cover every SpatialKey in the layout, substituting an empty tile where no
  // point contributed, so the stitcher receives a complete grid.
  val tileList =
  (0 until ld.layoutRows).flatMap { r =>
    (0 until ld.layoutCols).map { c =>
      val key = SpatialKey(c, r)
      key -> keytiles.getOrElse(key, IntArrayTile.empty(tl.tileCols, tl.tileRows))
    }
  }

  // Reassemble the per-key tiles into one raster (ignore the stitch offsets).
  val stitched = TileLayoutStitcher.stitch(tileList)._1
  //println(stitched)

  // --- Distributed (Spark) version of the same computation ---
  val conf = new SparkConf().setMaster("local").setAppName("Kernel Density")
  val sc = new SparkContext(conf)
  // Raise the console log level so Spark's INFO noise does not drown the results.
  sc.setLogLevel("WARN")
  // Parallelize the PointFeature collection across 10 partitions.
  val pointRdd = sc.parallelize(pts, 10)
  //println(pointRdd)

  // Sequencing operator for aggregateByKey: stamps a single point's kernel
  // onto (a copy of) the accumulator tile for the key it belongs to.
  def stampPointFeature(
                         tile: MutableArrayTile,
                         tup: (SpatialKey, PointFeature[Double])
                       ): MutableArrayTile = {
    val (key, feature) = tup
    val re = RasterExtent(ld.mapTransform(key), tile)
    // Stamp into a copy rather than mutating the incoming tile.
    val stamped = tile.copy.asInstanceOf[MutableArrayTile]
    KernelStamper(stamped, kern).stampKernelDouble(re.mapToGrid(feature.geom), feature.data)
    stamped
  }

  /** Cell-wise tile addition that treats NoData as the additive identity. */
  object Adder extends LocalTileBinaryOp {
    def combine(z1: Int, z2: Int) =
      if (isNoData(z1)) z2
      else if (isNoData(z2)) z1
      else z1 + z2

    def combine(r1: Double, r2: Double) =
      if (isNoData(r1)) r2
      else if (isNoData(r2)) r1
      else r1 + r2
  }

  // Combining operator for aggregateByKey: merges two partial density tiles.
  def sumTiles(t1: MutableArrayTile, t2: MutableArrayTile): MutableArrayTile = {
    // NOTE(review): downcast assumes Adder yields a MutableArrayTile for
    // array-backed inputs — TODO confirm against the LocalTileBinaryOp impl.
    Adder(t1, t2).asInstanceOf[MutableArrayTile]
  }

  // mapPartitions only transforms the RDD without triggering any shuffle. The
  // SpatialKey is duplicated into the value so that stampPointFeature can
  // determine the pixel position of each point inside its tile.
  val tileRdd: RDD[(SpatialKey, Tile)] =
    pointRdd
      .flatMap(ptfToSpatialKey)
      .mapPartitions({ partition =>
        partition.map { case (spatialKey, pointFeature) =>
          (spatialKey, (spatialKey, pointFeature))
        }
      }, preservesPartitioning = true)
      .aggregateByKey(ArrayTile.empty(DoubleCellType, ld.tileCols, ld.tileRows))(stampPointFeature, sumTiles)
      .mapValues { tile: MutableArrayTile => tile.asInstanceOf[Tile] }

  // Layer metadata: cell type, layout, extent, CRS, and the full key range
  // (0,0) .. (layoutCols-1, layoutRows-1).
  val metadata = TileLayerMetadata(DoubleCellType,
    ld,
    ld.extent,
    LatLng,
    KeyBounds(SpatialKey(0, 0),
      SpatialKey(ld.layoutCols - 1,
        ld.layoutRows - 1)))

  // Tile RDD plus metadata: a stitchable GeoTrellis layer.
  val resultRdd = ContextRDD(tileRdd, metadata)


  def main(args: Array[String]): Unit = {
    // Print every (key, tile) pair of the distributed result.
    resultRdd.foreach(println)

    // Stitch once and reuse: each .stitch call collects the whole RDD, so the
    // original's two calls ran the Spark job twice.
    val raster = resultRdd.stitch
    println(raster.tile)
    println(raster.extent)

    // Release Spark resources before the JVM exits.
    sc.stop()
  }
}
