package geotrellis.chatta

import com.typesafe.config.ConfigFactory
import geotrellis.spark.util.SparkUtils
import org.apache.spark.SparkConf
import geotrellis.spark._
import geotrellis.spark.tiling._
import geotrellis.spark.pipeline._
import geotrellis.spark.pipeline.json._
import geotrellis.spark.pipeline.json.read._
import geotrellis.spark.pipeline.json.transform._
import geotrellis.spark.pipeline.json.write._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.ast.untyped.ErasedNode


/**
 * Demo ETL job built with the GeoTrellis pipeline DSL: reads singleband rasters
 * from `data/arg_wm/`, cuts them to a tiled layout, reprojects to WebMercator,
 * builds a pyramid, and writes the layers out as a file-backed catalog.
 *
 * Equivalent to the JSON pipeline form, but constructed with typed DSL nodes.
 */
object ChattaPipelineDSL {


  import org.apache.spark.SparkContext

  /**
   * Shared Spark context for the pipeline run. Kryo serialization with the
   * GeoTrellis registrator is required for efficient (de)serialization of
   * tiles; the master URL comes from the `spark.master` config key.
   */
  implicit val sc: SparkContext = SparkUtils.createSparkContext(
    "GeoTrellis ETL SinglebandIngest",
    new SparkConf(true)
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", "geotrellis.spark.io.kryo.KryoRegistrator")
      .setMaster(ConfigFactory.load().getString("spark.master"))
  )

  // Layout scheme: float the layout to the input extent, using 512x512 tiles.
  // `Left` selects a LayoutScheme (vs. a fixed LayoutDefinition on the Right).
  val scheme = Left[LayoutScheme, LayoutDefinition](FloatingLayoutScheme(512))

  // Stage 1: read the source rasters as spatial singleband tiles via Hadoop.
  val jsonRead = JsonRead(
    "data/arg_wm/",
    `type` = ReadTypes.SpatialHadoopType
  )

  // Stage 2: cut the raw tiles into the target tiled layout.
  val jsonTileToLayout = TileToLayout(
    `type` = TransformTypes.SpatialTileToLayoutType
  )

  // Stage 3: buffered reproject into WebMercator (EPSG:3857) using `scheme`.
  val jsonReproject = Reproject(
    "EPSG:3857",
    scheme,
    `type` = TransformTypes.SpatialBufferedReprojectType
  )

  // Stage 4: build a zoom-level pyramid from the reprojected layer.
  val jsonPyramid = Pyramid(`type` = TransformTypes.SpatialPyramidType)

  // Stage 5: write each pyramid level as layer "mask" to a file-backed
  // catalog, indexed with a Z-order space-filling curve.
  val jsonWrite = JsonWrite(
    "mask",
    "file:/E:/geotrellis-chatta-demo/service/geotrellis/data/chatta-demo-pipeline-dsl",
    PipelineKeyIndexMethod("zorder"), scheme, `type` = WriteTypes.SpatialType)

  // Chain the stages into a single pipeline expression (read ~> ... ~> write).
  val list: List[PipelineExpr] = jsonRead ~ jsonTileToLayout ~ jsonReproject ~ jsonPyramid ~ jsonWrite

  // Typed AST for the pipeline — same shape as the JSON form, but the result
  // type (a stream of (zoom, layer) pairs) is checked at compile time.
  val typedAst: Node[Stream[(Int, TileLayerRDD[SpatialKey])]] =
    list
      .node[Stream[(Int, TileLayerRDD[SpatialKey])]]

  /**
   * Entry point: evaluates the pipeline AST (triggering the ingest and the
   * catalog writes) and then shuts down the Spark context.
   */
  def main(args: Array[String]): Unit = {
    try {
      // NOTE(review): eval is assumed to execute the write stage eagerly;
      // confirm whether the resulting Stream needs explicit forcing.
      val result: Stream[(Int, TileLayerRDD[SpatialKey])] = typedAst.eval
    } finally {
      // Always release Spark resources, even if the pipeline fails.
      sc.stop()
    }
  }

}
