// Project template repository:
//   git clone git@github.com:geotrellis/geotrellis-sbt-template
// After starting sbt, type `console` to enter the Scala REPL.

// Create a point
import geotrellis.proj4.LatLng
import geotrellis.vector._
Point(0,0)

//===============================
// Create a Tile (raster)
import geotrellis.raster._
import geotrellis.raster.mapalgebra.focal._
val nd = NODATA  // the NODATA sentinel value differs per CellType
val input = Array[Int](
  nd, 7, 1, 1, 3, 5, 9, 8, 2,
  9, 1, 1, 2, 2, 2, 4, 3, 5,
  3, 8, 1, 3, 3, 3, 1, 2, 2,
  2, 4, 7, 1, nd, 1, 8, 4, 3
)
// A 9-column x 4-row tile backed by the array above
val iat = IntArrayTile(input,9,4)

// Print the tile values as ASCII art
iat.asciiDraw()

//==================================
// Map algebra -- focal mean
val focalNeighborhood = Square(1) // a 3x3 square neighborhood
/**
  * 0 0 0
  * 0 0 0
  * 0 0 0
  */
val meanTile = iat.focalMean(focalNeighborhood)
meanTile.asciiDrawDouble()
meanTile.getDouble(0,0)
// Focal sum
val sumTile = iat.focalSum(focalNeighborhood)
sumTile.asciiDrawDouble()
// Other focal operations
iat.focalConway().asciiDrawDouble()
iat.focalMax(focalNeighborhood).asciiDrawDouble()
iat.focalMedian(focalNeighborhood).asciiDrawDouble()
iat.focalMode(focalNeighborhood).asciiDrawDouble()
iat.focalMin(focalNeighborhood).asciiDrawDouble()
iat.focalStandardDeviation(focalNeighborhood).asciiDrawDouble()

// Zonal operations (here the tile itself serves as the zone raster)
iat.zonalStatisticsDouble(iat)
iat.zonalHistogramDouble(iat)
iat.zonalHistogramInt(iat)
iat.zonalPercentage(iat)


// Map operations
import geotrellis.raster._
// `???` is a placeholder that throws NotImplementedError if evaluated;
// these lines only typecheck and are meant as a template.
val tile1: Tile = ???
val tile2: Tile = ???
// Local operations
// When the two tiles have identical dimensions:
tile1.localAdd(tile2)
// equivalent to
tile1 + tile2

// Reclassification
tile1.map(z => z+1)
tile2.mapDouble(z => z + 1.1)
// dualMap dispatches to the Int or Double variant depending on the cell type
tile1.dualMap({z => z+1})({z => z + 1.1})

// A local operation can also be defined directly with a combine function
tile1.dualCombine(tile2)
{(z1,z2) => z1 + z2}
{(z1,z2) => z1 + z2}

// The packages that contain the map-algebra operations
import geotrellis.raster.mapalgebra.local
import geotrellis.raster.mapalgebra.focal
import geotrellis.raster.mapalgebra.zonal
// Using with Spark
// Start sbt, then run `test:console`
// Construct a SparkContext with:
//
//val conf = new org.apache.spark.SparkConf()
//conf.setMaster("local[*]")
//implicit val sc = geotrellis.spark.util.SparkUtils.createSparkContext("Test",conf)
//
//sc.parallelize(Array(1,2,3))

// A Spark jar can be run with:
//spark-shell --conf spark.serializer=org.apache.spark.serializer.KryoSerializer --jars <jarfile>
// For a remote cluster, submit the jar with spark-submit instead
//===================================================
// Kernel density: build a raster from random point features

import geotrellis.vector._
import scala.util._

// Working extent in lon/lat degrees: Extent(xmin, ymin, xmax, ymax)
val extent = Extent(-109,37,-102,41)
/** Generates a point feature at a uniformly random location inside `extent`,
  * carrying a random magnitude in [16, 32) as its data.
  */
def randomPointFeature(extent: Extent): PointFeature[Double] = {
  // Linear interpolation between low and high at a uniform t in [0, 1).
  def randInRange(low: Double, high: Double): Double = {
    val t = Random.nextDouble()
    // BUG FIX: the original `low * (1-t) + high + t` is not an interpolation
    // and produces values far outside [low, high); `high` must be scaled by t.
    low * (1 - t) + high * t
  }

  Feature(Point(randInRange(extent.xmin, extent.xmax),
    randInRange(extent.ymin, extent.ymax)),
    // BUG FIX: `Random.nextInt % 16` can be negative (Scala `%` keeps the
    // dividend's sign), so the original yielded 1..31 instead of the
    // intended 16..31; nextInt(16) is uniform on 0..15.
    Random.nextInt(16) + 16)
}

// 1000 random point features inside the extent
val pts = (for (i <- 1 to 1000) yield randomPointFeature(extent)).toList

// A CRS names a coordinate reference system:
//geotrellis.proj4.CRS.fromName("EPSG:4326")
// The line above is equivalent to
//geotrellis.proj4.LatLng

// Create a tile containing the kernel density estimate
import geotrellis.raster._
import geotrellis.raster.mapalgebra.focal.Kernel

val kernelWidth: Int = 9
// Gaussian kernel, 9 cells wide — presumably (size, sigma, amplitude); confirm against Kernel.gaussian docs
val kern: Kernel = Kernel.gaussian(kernelWidth, 1.5, 25)
// Stamp every feature's kernel onto a 700x400 grid covering `extent`
val kde: Tile = pts.kernelDensity(kern, RasterExtent(extent, 700, 400))

import geotrellis.raster.render._

// Color breaks every 4 values from 0 up to the tile's maximum
val colorMap = ColorMap(
  (0 to kde.findMinMax._2 by 4).toArray,
  ColorRamps.HeatmapBlueToYellowToRedSpectrum
)
// Render a color-mapped PNG
kde.renderPng(colorMap).write("./test.png")
// Write a georeferenced TIFF
import geotrellis.raster.io.geotiff._
GeoTiff(kde,extent,LatLng).write("./test.tif")

// Splitting tiles
// (useful for processing large TIFF files)

import geotrellis.spark.tiling._
// Cut into a 7x4 grid of tiles, each 100x100 cells
val tl = TileLayout(7, 4, 100, 100)
// Combine the tile layout with the extent
val ld = LayoutDefinition(extent,tl)

/** Computes the extent covered by a kernel `kwidth` cells wide centered on
  * the feature's point, using the layout's cell width/height to convert
  * cell counts into map units.
  */
def pointFeatureToExtent[D](kwidth: Double, ld: LayoutDefinition, ptf: PointFeature[D]): Extent = {
  val center = ptf.geom
  val halfWidth = kwidth * ld.cellwidth / 2
  val halfHeight = kwidth * ld.cellheight / 2

  Extent(
    center.x - halfWidth,
    center.y - halfHeight,
    center.x + halfWidth,
    center.y + halfHeight)
}
// Convenience wrapper: kernel width 9 against the script-level layout `ld`
def ptfToExtent[D] (p: PointFeature[D]) = pointFeatureToExtent(9,ld,p)

import geotrellis.spark._
/** Maps a point feature to every SpatialKey whose tile is touched by the
  * feature's kernel footprint, pairing each key with the feature.
  * Keys beyond the layout's column/row bounds are dropped.
  */
def  ptfToSpatialKey[D](ptf: PointFeature[D]): Seq[(SpatialKey,PointFeature[D])] = {
  val footprint = ptfToExtent(ptf)
  val touched = ld.mapTransform(footprint)

  for {
    (col, row) <- touched.coords
    if col < tl.totalCols && row < tl.totalRows
  } yield (SpatialKey(col, row), ptf)
}

// Group the features by the spatial key(s) of the tiles they touch
val keyfeatures: Map[SpatialKey, List[PointFeature[Double]]] =
  pts.flatMap(ptfToSpatialKey).groupBy(_._1).map {case (sk,v) => (sk,v.unzip._2)}

// Compute one kernel-density tile per key, over that key's extent
val keytiles = keyfeatures.map{case (sk,pfs) => (
  sk, pfs.kernelDensity(
  kern,
  RasterExtent(ld.mapTransform(sk), tl.tileDimensions._1,tl.tileDimensions._2)
)
)}

import geotrellis.spark.stitch.TileLayoutStitcher

// Enumerate every key in the layout, substituting an empty tile
// wherever no feature landed
val tileList =
  for {
    r <- 0 until ld.layoutRows
    c <- 0 until ld.layoutCols
  } yield {
    val k = SpatialKey(c,r)
    (k,keytiles.getOrElse(k, IntArrayTile.empty(tl.tileCols,tl.tileRows)))
  }

// Stitch the per-key tiles back into one full raster
val stitched = TileLayoutStitcher.stitch(tileList)._1

// Distribute the computation with Spark
import org.apache.spark.{SparkConf,SparkContext}
val conf = new SparkConf().setMaster("local").setAppName("Kernel Density")
val sc = new SparkContext(conf)
import org.apache.spark.rdd.RDD
// Parallelize the features into 10 partitions
val pointRdd = sc.parallelize(pts,10)

import geotrellis.raster.density.KernelStamper
/** Stamps one point feature's kernel onto a copy of `tile`.
  * The tile's raster extent is derived from the feature's SpatialKey so the
  * point can be converted from map to grid coordinates. Returns the copy;
  * the input tile itself is not mutated.
  */
def stampPointFeature(tile: MutableArrayTile,
                      tup:(SpatialKey, PointFeature[Double])): MutableArrayTile = {
  val (key, feature) = tup
  val raster = RasterExtent(ld.mapTransform(key), tile)
  val stamped = tile.copy.asInstanceOf[MutableArrayTile]
  KernelStamper(stamped, kern)
    .stampKernelDouble(raster.mapToGrid(feature.geom), feature.data)
  stamped
}

import geotrellis.raster.mapalgebra.local.LocalTileBinaryOp

/** Local binary op that adds two tiles cellwise, treating NODATA as the
  * identity: when one operand is NODATA the other is returned unchanged.
  */
object Adder extends LocalTileBinaryOp {
  override def combine(z1: Double, z2: Double): Double =
    if (isNoData(z1)) z2
    else if (isNoData(z2)) z1
    else z1 + z2

  override def combine(z1: Int, z2: Int) =
    if (isNoData(z1)) z2
    else if (isNoData(z2)) z1
    else z1 + z2
}

/** Cellwise NODATA-aware sum of two mutable tiles via `Adder`. */
def sumTiles(t1:MutableArrayTile, t2: MutableArrayTile): MutableArrayTile = {
  val summed = Adder(t1, t2)
  summed.asInstanceOf[MutableArrayTile]
}

//val tileRdd: RDD[(SpatialKey, Tile)] =
//  pointRdd
//    .flatMap(ptfToSpatialKey)
//    .mapPartitions({ partition =>
//      partition.map { case (spatialKey, pointFeature) =>
//        (spatialKey, (spatialKey, pointFeature))
//      }
//    }, preservesPartitioning = true)
//    .aggregateByKey(ArrayTile.empty(DoubleCellType, ld.tileCols, ld.tileRows))
//(stampPointFeature, sumTiles)
//  .mapValues{ tile: MutableArrayTile => tile.asInstanceOf[Tile] }
//
//import geotrellis.proj4.LatLng
//val metadata = TileLayerMetadata(DoubleCellType,
//  ld,
//  ld.extent,
//  LatLng,
//  KeyBounds(SpatialKey(0,0),
//    SpatialKey(ld.layoutCols - 1,
//      ld.layoutRows - 1)))
//
//val resultRdd = ContextRDD(tileRdd, metadata)

// Reading TIFF files
import geotrellis.raster.io.geotiff.reader.GeoTiffReader
import geotrellis.raster.io.geotiff._

val path: String = "data/DevelopedLand.tiff"
// Read a single band
val geoTiff: SinglebandGeoTiff = GeoTiffReader.readSingleband(path)
//or
val geoTiff2: SinglebandGeoTiff = SinglebandGeoTiff(path)
// Read multiband
val multiTiff : MultibandGeoTiff = GeoTiffReader.readMultiband(path)
//or
val multiTiff2: MultibandGeoTiff = MultibandGeoTiff(path)

// Read a compressed TIFF and keep it compressed
// NOTE(review): the flags are presumably (decompress, streaming) — confirm against the GeoTiffReader API
val compressedGeoTiff: SinglebandGeoTiff = GeoTiffReader.readSingleband(path,false,false)
// Read a compressed TIFF and decompress it
val compositeTile2 = GeoTiffReader.readSingleband(path,true,false)

// Cropping: a streamed GeoTIFF is not fully read into memory, yet can still be cropped
val e: Extent = Extent(0,1,2,3)

SinglebandGeoTiff.streaming(path).crop(e)
//or
GeoTiffReader.readSingleband(path,false,true).crop(e)

MultibandGeoTiff.streaming(path).crop(e)
//or
GeoTiffReader.readMultiband(path,false,true).crop(e)

// Glossary
// Vector or Geometry: points, lines, and polygons
// Extent or Bounding Box: an axis-aligned rectangular region
// Feature: a geometry with some associated data
// Cell: a single unit (pixel) of a raster grid
// Tile: a rectangular grid of cells
// Raster: a tile paired with an extent
// RDD: a distributed collection of elements (Spark)
// Key: identifies an element of an RDD (e.g. a SpatialKey locating a tile within a layout)
// Layout Definition or Layout: describes how tiles are arranged over an extent
// Metadata or Layer Metadata: describes a layer's properties (cell type, CRS, layout, bounds)
// Layer or Tile Layer: an RDD of keyed tiles together with its metadata
// Pyramid: the same layer rendered at multiple zoom levels
// Catalog: a store of layers/pyramids and their attributes

// Core packages
import geotrellis.proj4  // projections and coordinate systems
import geotrellis.raster // raster data processing
import geotrellis.vector // vector data processing

// Distributed-processing package
import geotrellis.spark

// Storage backends
import geotrellis.spark.io
//import geotrellis.accumulo
//import geotrellis.cassandra
//import geotrellis.geomesa
//import geotrellis.hbase
//import geotrellis.s3

import geotrellis.vector.io._
// Serialize a point to GeoJSON
Point(1,1).toGeoJson





