package com.wangwg.sparkTest.Analysis.Shp

import java.io.File
import java.util.UUID

import com.wangwg.sparkTest.utils.GeometryUtils
import org.apache.hadoop.conf.Configuration
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SQLTypes, SparkSession}
import org.apache.spark.{SparkContext, TaskContext}
import org.geotools.data.shapefile.{ShapefileDataStore, ShapefileDataStoreFactory}
import org.geotools.data.{FeatureWriter, Query, Transaction}
import org.locationtech.geomesa.features.ScalaSimpleFeatureFactory
import org.locationtech.geomesa.spark.jts._
import org.locationtech.geomesa.spark.{GeoMesaSpark, GeoMesaSparkKryoRegistrator, SpatialRDD}
import org.locationtech.jts.geom.Geometry
import org.locationtech.jts.index.SpatialIndex
import org.locationtech.jts.index.quadtree.Quadtree
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}

import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import scala.reflect.io.Directory

/**
 * geomesa shp文件数据源
 * 两个shp数据源相交计算
 * 空间分析
 */
object GeomesaIntersetShp {
  def main(args: Array[String]): Unit = {
    // val, not var: startTime is never reassigned.
    val startTime = System.currentTimeMillis()
    val sparkSession = SparkSession.builder()
      .appName("GeomesaIntersetShp")
      .config("spark.driver.maxResultSize", "2g") // cap on results collected at the driver; a job exceeding 2g is aborted
      .config("spark.driver.memory", "4g") // driver heap size
      .config("spark.executor.memory", "4g") // heap size of each executor
      .config("spark.executor.cores", "1")
      .config("spark.default.parallelism", "200") // default task count (normally equals the CPU core count)
      .config("spark.sql.crossJoin.enabled", "true")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.kryo.registrator", classOf[GeoMesaSparkKryoRegistrator].getName)
      .master("local[*]")
      .getOrCreate()
      .withJTS
    SQLTypes.init(sparkSession.sqlContext)

    // Open the two shapefile layers: the overlay layer and the source layer.
    //val sourceRdd = openShpRdd(sparkSession.sparkContext, "D:\\work\\bigdata\\Interset\\TDLYXZ", "TDLYXZ")
    val intersetRdd = openShpRdd(sparkSession.sparkContext, "D:\\work\\bigdata\\data\\buildings", "buildings_clip")
    val sourceRdd = openShpRdd(sparkSession.sparkContext, "D:\\work\\bigdata\\data\\landuse", "landuse_clip")
    //val intersetRdd = openShpRdd(sparkSession.sparkContext, "D:\\work\\bigdata", "gis_osm_landuse_a_free_1")

    // Build an in-memory quadtree over the overlay layer on the driver and
    // broadcast it so each executor can prune candidates by envelope.
    val indexRdd = createQIndex(intersetRdd)
    val broadcastRdd = sparkSession.sparkContext.broadcast(indexRdd)
    intersetFeature(sparkSession.sparkContext, sourceRdd, broadcastRdd)
    val endTime = System.currentTimeMillis
    println((endTime - startTime) / 1000) // elapsed wall-clock time in seconds
  }

  /**
   * Builds an in-memory R-tree (JTS STRtree) spatial index over all features of the RDD.
   *
   * Collects the entire RDD to the driver, so the layer must fit in driver memory.
   * Features whose default geometry is null are skipped.
   *
   * @param rdd features to index
   * @return an STRtree mapping each feature's envelope to the feature itself
   */
  def createRIndex(rdd: RDD[SimpleFeature]): SpatialIndex = {
    import org.locationtech.jts.index.strtree.STRtree
    // Use the default node capacity (10). The previous code passed the total
    // feature count as the node capacity, which collapses the tree into a single
    // flat node (every query scans everything, defeating the index) and throws
    // IllegalArgumentException when the collection has fewer than two features.
    val stRtree = new STRtree()
    rdd.collect().foreach { item =>
      val itemGeometry = item.getDefaultGeometry.asInstanceOf[Geometry]
      if (itemGeometry != null) {
        stRtree.insert(itemGeometry.getEnvelopeInternal, item)
      }
    }
    stRtree
  }

  /**
   * Builds an in-memory quadtree spatial index over all features of the RDD.
   *
   * Collects the entire RDD to the driver, so the layer must fit in driver memory.
   * Features whose default geometry is null are skipped. Quadtrees answer region
   * queries efficiently, especially when the data is evenly distributed.
   *
   * @param rdd features to index
   * @return a Quadtree mapping each feature's envelope to the feature itself
   */
  def createQIndex(rdd: RDD[SimpleFeature]): SpatialIndex = {
    val quadtree = new Quadtree()
    for (feature <- rdd.collect()) {
      val geometry = feature.getDefaultGeometry.asInstanceOf[Geometry]
      if (geometry != null) {
        quadtree.insert(geometry.getEnvelopeInternal, feature)
      }
    }
    quadtree
  }

  /**
   * Opens a shapefile layer as a GeoMesa SpatialRDD.
   *
   * @param sparkContext active Spark context
   * @param path         directory containing the shapefile
   * @param name         type (layer) name of the shapefile, without extension
   * @return a SpatialRDD over all features of the layer
   */
  def openShpRdd(sparkContext: SparkContext, path: String, name: String): SpatialRDD = {
    // Immutable Map literal instead of a var built by mutation; the original
    // also named a local `File`, shadowing the java.io.File class.
    // "geotools" -> "true" selects the GeoTools data-store provider.
    val params = Map(
      "url" -> new File(path).toURI.toURL.toString,
      "geotools" -> "true"
    )
    val query = new Query(name)
    GeoMesaSpark(params).rdd(new Configuration(), sparkContext, params, query)
  }

  /**
   * Repartitions an RDD so that each partition holds at most `count` features.
   *
   * Note: this triggers a full `count()` action over the RDD. The result always
   * has at least one partition, even for an empty RDD.
   *
   * @param rdd   features to repartition
   * @param count target maximum number of features per partition
   * @return the repartitioned RDD
   */
  def repartRdd(rdd: RDD[SimpleFeature], count: Int): RDD[SimpleFeature] = {
    println("默认分区数:".concat(rdd.getNumPartitions.toString))
    val total = rdd.count()
    // Ceiling division, clamped to a minimum of one partition.
    val partitions = math.max(1L, (total + count - 1) / count).toInt
    val repartitioned = rdd.repartition(partitions)
    println("现有分区数:".concat(repartitioned.getNumPartitions.toString))
    repartitioned
  }

  /**
   * Intersects every source feature with the broadcast overlay index and writes
   * the clipped results out as shapefiles, one file per output partition.
   *
   * Candidate pairs are pruned by envelope lookup against the broadcast spatial
   * index before the expensive exact geometry predicates run. Without an index
   * every pair would be compared, which performs very poorly.
   * Timings recorded by the original author: GeoTools R-tree ~164s, quadtree
   * ~43s (quadtrees handle region queries well when data is evenly distributed).
   *
   * @param sparkContext    active Spark context (its application id names the output directory)
   * @param sourceRdd       features to be clipped
   * @param intersetRddTree broadcast spatial index over the overlay features
   */
  def intersetFeature(sparkContext: SparkContext, sourceRdd: SpatialRDD, intersetRddTree: Broadcast[SpatialIndex]): Unit = {
    val reSourceRdd = sourceRdd
    val resultPath = "D:\\work\\bigdata\\Interset\\".concat(sparkContext.getConf.getAppId) // output directory named after the current application
    val directory = Directory(resultPath);
    directory.createDirectory(true, false); // create the output directory (failIfExists = false)
    val resultRdd = reSourceRdd.mapPartitions(partion => {
      partion.flatMap(item => {
        var originGeometry: Geometry = item.getDefaultGeometry.asInstanceOf[Geometry]
        // Collects one output feature per overlay feature that intersects `item`.
        val intersetArray = ListBuffer[SimpleFeature]();
        if (originGeometry != null) {
          // Repair invalid geometries before evaluating any spatial predicate;
          // JTS predicates/overlay ops may fail or misbehave on invalid input.
          if (!originGeometry.isValid) {
            originGeometry = GeometryUtils.validate(originGeometry)
          }
          // Cheap envelope query against the broadcast index to get candidates.
          val queryItems = intersetRddTree.value.query(originGeometry.getEnvelopeInternal)
          if (queryItems != null) {
            val intesetIterator = queryItems.iterator()
            while (intesetIterator.hasNext) {
              val intesetFeature = intesetIterator.next().asInstanceOf[SimpleFeature]
              var targetGeometry: Geometry = intesetFeature.getDefaultGeometry.asInstanceOf[Geometry]
              if (targetGeometry != null) {
                // Second-level filter: candidate's envelope polygon vs the exact
                // source geometry, still cheaper than a full intersection.
                if (targetGeometry.getEnvelope.intersects(originGeometry)) {
                  if (!targetGeometry.isValid) {
                    targetGeometry = GeometryUtils.validate(targetGeometry)
                  }
                  var resultGeometry: Geometry = null
                  // Containment short-circuit: if the source fully contains the
                  // target, the intersection IS the target — skip the overlay op.
                  if (originGeometry.contains(targetGeometry)) {
                    resultGeometry = targetGeometry;
                  } else if (originGeometry.intersects(targetGeometry)) {
                    resultGeometry = originGeometry.intersection(targetGeometry)
                  }
                  if (resultGeometry != null) {
                    // Copy the SOURCE feature's schema/attributes/id, then swap
                    // in the clipped geometry. Overlay attributes are not kept.
                    val copySimpleFeature = ScalaSimpleFeatureFactory.copyFeature(item.getFeatureType, item, item.getID)
                    copySimpleFeature.setDefaultGeometry(resultGeometry)
                    intersetArray += copySimpleFeature;
                  }
                }
              }
            }
          }
        }
        intersetArray
      })
    })

    // Re-partition so each output shapefile holds at most 50000 features.
    val reResultRdd = repartRdd(resultRdd, 50000);
    //val reResultRdd = resultRdd
    reResultRdd.foreachPartition(partition => {
      import java.io.{File, Serializable}
      import java.util
      // Unique file name per partition: UUID avoids collisions across retries.
      val path = resultPath.concat("\\").concat(UUID.randomUUID.toString).concat("_").concat(TaskContext.getPartitionId.toString).concat(".shp")
      val file = new File(path)
      val featureparams: util.Map[String, Serializable] = new util.HashMap[String, Serializable]
      featureparams.put(ShapefileDataStoreFactory.URLP.key, file.toURI.toURL)
      var featurewriter: FeatureWriter[SimpleFeatureType, SimpleFeature] = null;
      var featureshapefileDataStore: ShapefileDataStore = null
      partition.foreach(item => {
        try {
          // Lazily create the store/writer on the first feature, so empty
          // partitions produce no (schema-less) shapefile on disk.
          if (featurewriter == null) {
            featureshapefileDataStore = new ShapefileDataStoreFactory().createNewDataStore(featureparams).asInstanceOf[ShapefileDataStore]
            featureshapefileDataStore.createSchema(item.getFeatureType)
            featurewriter = featureshapefileDataStore.getFeatureWriterAppend(Transaction.AUTO_COMMIT)
          }
          var simpleFeature = featurewriter.next()
          simpleFeature.setAttributes(item.getAttributes)
          simpleFeature.setDefaultGeometry(item.getDefaultGeometry)
          featurewriter.write()
        } catch {
          // Best-effort: a failed feature is logged and skipped, the rest of
          // the partition is still written.
          case e: Exception => {
            e.printStackTrace()
          }
        }
      })
      // NOTE(review): close/dispose are not in a finally block — if close()
      // throws, the data store leaks; confirm this is acceptable for a batch job.
      if (featurewriter != null) {
        featurewriter.close()
        featureshapefileDataStore.dispose()
      }
    })
  }
}
