package com.sjgs.gis.spark

import geotrellis.proj4.WebMercator
import geotrellis.raster.render.Png
import geotrellis.raster.resample.Bilinear
import geotrellis.raster.{MultibandTile, _}
import geotrellis.spark.SpatialKey._
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.file._
import geotrellis.spark.io.hadoop._
import geotrellis.spark.io.index._
import geotrellis.spark.pyramid._
import geotrellis.spark.tiling.{FloatingLayoutScheme, _}
import geotrellis.vector.ProjectedExtent
import javax.servlet.ServletOutputStream
import javax.servlet.http.HttpServletResponse
import org.apache.spark._
import org.apache.spark.rdd.RDD


/**
  * GeoTrellis raster tiling service.
  *
  * Reads individual z/x/y map tiles out of a GeoTrellis tile catalog and
  * ingests GeoTiff imagery into pyramided, WebMercator tile layers.
  *
  * @param localPath   local file-system catalog path (used when storeType == "fs")
  * @param catalogPath HDFS catalog path (used when storeType == "hdfs")
  * @author jxw 20200918
  */
class SparkTileMapService(val localPath: String, val catalogPath: String) {

  /**
    * Read the tile at the given z/x/y coordinates from the catalog, render it
    * as a PNG and write the bytes to the HTTP response. If the tile (or its
    * layer metadata) is missing, or the store type is unknown, nothing is
    * written.
    *
    * NOTE(review): `implicit` at the head of the parameter list makes EVERY
    * parameter in this list implicit, not just `sc`. Callers pass all
    * arguments explicitly anyway, so the signature is kept byte-identical for
    * compatibility; consider moving `sc` into a trailing implicit list.
    * `sc` is not referenced in this method.
    *
    * @param sc                  Spark context (unused here; kept for signature compatibility)
    * @param storeType           backing store selector: "fs" (local) or "hdfs"
    * @param httpServletResponse response the PNG bytes are streamed to
    * @param layer               layer name in the catalog
    * @param z                   zoom level (used as the LayerId zoom)
    * @param x                   tile column in the layout grid
    * @param y                   tile row in the layout grid
    */
  def readTileLayer(implicit sc: SparkContext, storeType: String, httpServletResponse: HttpServletResponse, layer: String, z: Integer, x: Integer, y: Integer): Unit = {
    // Read in the tile at the given z/x/y coordinates.
    val tileOpt: Option[MultibandTile] =
      try {
        val layerId: LayerId = LayerId(layer, z)
        val key = SpatialKey(x, y)
        storeType match {
          case "fs" =>
            // Reader over the indexed tiles produced by the local-file ingest.
            val attributeStore: AttributeStore = AttributeStore(localPath)
            val valueReader: ValueReader[LayerId] = ValueReader(attributeStore, localPath)
            val reader = valueReader.reader[SpatialKey, MultibandTile](layerId)
            // default x,y request is WebMercator format
            Some(reader.read(key))
          case "hdfs" =>
            val hadoopValueReader: HadoopValueReader = HadoopValueReader(catalogPath)
            // Multiband reader backed by the HDFS catalog.
            val reader = hadoopValueReader.reader[SpatialKey, MultibandTile](layerId)
            // default x,y request is WebMercator format
            Some(reader.read(key))
          case _ =>
            // BUG FIX: an unrecognized store type previously threw a
            // scala.MatchError that the catch block below did not handle;
            // treat it the same as a missing tile.
            None
        }
      } catch {
        // Tile not present at this key, or layer attributes missing from the
        // catalog: respond with an empty body rather than an error page.
        case _: ValueNotFoundError =>
          None
        case _: AttributeNotFoundError =>
          None
      }
    // Render the tile (if found) to PNG and stream the bytes to the client.
    // `foreach` replaces the original `for ... yield`, whose result was
    // discarded anyway in this Unit-returning method.
    tileOpt.foreach { tile =>
      val product: Tile = tile.color()
      val png: Png = product.renderPng()
      val out: ServletOutputStream = httpServletResponse.getOutputStream()
      out.write(png.bytes)
    }
  }

  /**
    * Ingest a GeoTiff into a pyramided tile layer.
    *
    * Reads the source imagery, cuts it to a floating layout, reprojects to
    * WebMercator 256x256 slippy-map tiles, then pyramids from the closest
    * zoom level up to level 0, writing each level into the catalog at
    * `outputPath` (HDFS when the path starts with "hdfs://", local file
    * system otherwise). Existing layers at the same LayerId are deleted
    * before being rewritten.
    *
    * NOTE(review): as in `readTileLayer`, the leading `implicit` makes all
    * four parameters implicit; kept unchanged for caller compatibility.
    *
    * @param sc         Spark context used to build the source RDD
    * @param layer      layer name to write the pyramid under
    * @param inputPath  path to the source GeoTiff(s)
    * @param outputPath catalog root ("hdfs://..." selects the HDFS writer)
    */
  def writeTileLayer(implicit sc: SparkContext, layer: String, inputPath: String, outputPath: String) = {
    // Read the geotiff in as a single image RDD, using a method implicitly
    // added to SparkContext via "import geotrellis.spark.io.hadoop._".
    val inputRdd: RDD[(ProjectedExtent, MultibandTile)] =
      sc.hadoopMultibandGeoTiffRDD(inputPath)

    // Use "TileLayerMetadata.fromRDD" to find the zoom level closest to the
    // resolution of the source image, and derive the full bounding box and
    // cell type.
    val (_, rasterMetaData) =
      TileLayerMetadata.fromRDD(inputRdd, FloatingLayoutScheme(512))

    // Cut the imagery into tiles indexed by the floating layout scheme.
    // Repartition so spark works with more, smaller partitions (to a point)
    // rather than few large ones.
    val tiled: RDD[(SpatialKey, MultibandTile)] =
      inputRdd
        .tileToLayout(rasterMetaData.cellType, rasterMetaData.layout, Bilinear)
        .repartition(50)

    // Zoomed layout scheme in WebMercator (fits the slippy-map tile spec),
    // producing 256 x 256 tiles.
    val layoutScheme = ZoomedLayoutScheme(WebMercator, tileSize = 256)

    // Reproject the tiled layer to WebMercator; `zoom` is the native zoom
    // level the pyramid is built down from.
    val (zoom, reprojected): (Int, RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]]) =
      MultibandTileLayerRDD(tiled, rasterMetaData)
        .reproject(WebMercator, layoutScheme, Bilinear)

    if (outputPath.startsWith("hdfs://")) {
      val hdfsOutPath = new org.apache.hadoop.fs.Path(outputPath)
      // Attribute store describing the HDFS catalog.
      val attributeStore = HadoopAttributeStore(hdfsOutPath)
      // Writer that stores the tiles in the HDFS catalog.
      val writer = HadoopLayerWriter(hdfsOutPath, attributeStore)
      // Pyramid up the zoom levels, writing each level out to HDFS.
      Pyramid.upLevels(reprojected, layoutScheme, zoom, Bilinear) { (rdd, z) =>
        val layerId = LayerId(layer, z)
        // If the layer exists already, delete it before writing.
        if (attributeStore.layerExists(layerId)) {
          new HadoopLayerManager(attributeStore).delete(layerId)
        }
        writer.write(layerId, rdd, ZCurveKeyIndexMethod)
      }
    } else {
      // Attribute store describing the local file-system catalog.
      val attributeStore = FileAttributeStore(outputPath)
      // Writer that stores the tiles in the local catalog.
      val writer = FileLayerWriter(attributeStore)
      // Pyramid up the zoom levels, writing each level to the local file system.
      Pyramid.upLevels(reprojected, layoutScheme, zoom, Bilinear) { (rdd, z) =>
        val layerId = LayerId(layer, z)
        // If the layer exists already, delete it before writing.
        if (attributeStore.layerExists(layerId)) {
          new FileLayerManager(attributeStore).delete(layerId)
        }
        writer.write(layerId, rdd, ZCurveKeyIndexMethod)
      }
    }
  }
}