package com.wangwg.sparkTest.Analysis.Shp

import java.io.File
import java.util.UUID

import com.wangwg.sparkTest.utils.GeometryUtils
import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.{SQLTypes, SparkSession}
import org.apache.spark.{SparkContext, TaskContext}
import org.geotools.data.shapefile.{ShapefileDataStore, ShapefileDataStoreFactory}
import org.geotools.data.{FeatureWriter, Query, Transaction}
import org.locationtech.geomesa.spark.jts._
import org.locationtech.geomesa.spark.{GeoMesaSpark, GeoMesaSparkKryoRegistrator, SpatialRDD}
import org.locationtech.jts.geom.{Coordinate, Geometry, GeometryFactory}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}

import scala.collection.JavaConversions._
import scala.reflect.io.Directory

/**
 * GeoMesa shapefile clipping job (spatial analysis).
 *
 * Reads a shapefile data source via GeoMesa's GeoTools bridge, intersects every
 * feature with a fixed clip polygon, and writes the clipped geometries out as
 * one shapefile per Spark partition.
 */
object GeomesaClipShp {

  // Target number of features per output partition (drives the repartition).
  private val FeaturesPerPartition = 50L

  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder()
      .appName("GeomesaClipShp")
      .config("spark.sql.crossJoin.enabled", "true")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.kryo.registrator", classOf[GeoMesaSparkKryoRegistrator].getName)
      .master("local[*]")
      .getOrCreate()
      .withJTS
    SQLTypes.init(sparkSession.sqlContext)

    // Open the shapefile directory as a GeoTools-backed GeoMesa data source.
    // (Renamed from `File`, which shadowed java.io.File.)
    val shpDir = new File("D:\\work\\bigdata\\TestShp")
    val params: Map[String, String] = Map(
      "url" -> shpDir.toURI.toURL.toString,
      "geotools" -> "true"
    )
    val typName = "1775a16a-e47c-4c70-bbcc-c75773862f7f_0"
    val query = new Query(typName)
    val rdd = GeoMesaSpark(params).rdd(new Configuration(), sparkSession.sparkContext, params, query)
    printFeature(sparkSession.sparkContext, rdd)
  }

  /**
   * Clips every feature in `rdd` against a hard-coded rectangle and writes the
   * results as per-partition shapefiles under a directory named after the
   * Spark application id.
   *
   * @param sparkContext active Spark context (used for broadcasts and conf)
   * @param rdd          GeoMesa spatial RDD of SimpleFeatures to clip
   */
  def printFeature(sparkContext: SparkContext, rdd: SpatialRDD): Unit = {
    println("默认分区数:".concat(rdd.getNumPartitions.toString))
    val shpCount = rdd.count()
    // Ceiling division: one partition per FeaturesPerPartition features, minimum 1.
    val rddCount = math.max(1L, (shpCount + FeaturesPerPartition - 1) / FeaturesPerPartition)
    val reRdd = rdd.repartition(rddCount.toInt) // repartition the RDD
    println("现有分区数:".concat(reRdd.getNumPartitions.toString))
    val resultPath = "D:\\work\\bigdata\\".concat(sparkContext.getConf.getAppId) // output dir for this application
    val directory = Directory(resultPath)
    directory.createDirectory(true, false) // create the output directory

    // Fixed clip polygon: a closed rectangular ring (first coordinate repeated last).
    val geometryFactory = new GeometryFactory
    val coordinates = Array(
      new Coordinate(11948262, 4037425),
      new Coordinate(11951961, 4037425),
      new Coordinate(11951961, 4041089),
      new Coordinate(11948262, 4041089),
      new Coordinate(11948262, 4037425)
    )
    val clipGeometry = geometryFactory.createPolygon(coordinates)

    // Broadcast the shared read-only values once to all executors.
    val clipGeometryBroadcast = sparkContext.broadcast(clipGeometry)
    val pathBroadcast = sparkContext.broadcast(resultPath)
    val nameBroadcast = sparkContext.broadcast(UUID.randomUUID.toString)
    reRdd.foreachPartition { iter =>
      import java.io.{File, Serializable}
      import java.util
      import scala.util.control.NonFatal
      val path = pathBroadcast.value.concat("\\").concat(nameBroadcast.value).concat("_").concat(TaskContext.getPartitionId.toString).concat(".shp")
      val file = new File(path)
      val featureParams: util.Map[String, Serializable] = new util.HashMap[String, Serializable]
      featureParams.put(ShapefileDataStoreFactory.URLP.key, file.toURI.toURL)
      // Created lazily on the first feature that survives the clip, so empty
      // partitions do not produce empty shapefiles on disk.
      var featureWriter: FeatureWriter[SimpleFeatureType, SimpleFeature] = null
      var dataStore: ShapefileDataStore = null
      try {
        iter.foreach { item =>
          try {
            var originGeometry = item.getDefaultGeometry.asInstanceOf[Geometry]
            // BUG FIX: guard against a null default geometry before calling isValid.
            if (originGeometry != null && !originGeometry.isValid) {
              // GeometryUtils.validate may return null; the check below handles that.
              originGeometry = GeometryUtils.validate(originGeometry)
            }
            val targetGeometry: Geometry = clipGeometryBroadcast.value
            if (originGeometry != null && originGeometry.intersects(targetGeometry)) {
              val resultGeometry = originGeometry.intersection(targetGeometry)
              if (resultGeometry != null) {
                if (featureWriter == null) {
                  dataStore = new ShapefileDataStoreFactory().createNewDataStore(featureParams).asInstanceOf[ShapefileDataStore]
                  dataStore.createSchema(item.getFeatureType)
                  featureWriter = dataStore.getFeatureWriterAppend(Transaction.AUTO_COMMIT)
                }
                val simpleFeature = featureWriter.next()
                simpleFeature.setAttributes(item.getAttributes)
                simpleFeature.setDefaultGeometry(resultGeometry)
                featureWriter.write()
              }
            }
          } catch {
            // Best-effort per feature: log and keep processing the partition.
            // NonFatal lets OOM/interrupts propagate instead of being swallowed.
            case NonFatal(e) => e.printStackTrace()
          }
        }
      } finally {
        // BUG FIX: the original called close()/dispose() unconditionally, which
        // threw a NullPointerException whenever a partition wrote no features.
        // Guard both and release them in a finally so failures don't leak handles.
        if (featureWriter != null) featureWriter.close()
        if (dataStore != null) dataStore.dispose()
      }
    }
  }
}
