package com.wangwg.sparkTest.Shp

import java.io.File

import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, SparkContext}
import org.geotools.data.{DataStore, DataStoreFinder, Query}
import org.geotools.feature.simple.SimpleFeatureTypeBuilder
import org.locationtech.geomesa.spark.{GeoMesaSpark, GeoMesaSparkKryoRegistrator}

import scala.collection.JavaConversions._

/**
 * Bulk-loads shapefile data into PostGIS using Spark and GeoMesa.
 *
 * Reads features from a shapefile directory through the GeoTools-backed
 * GeoMesa Spark provider, creates a matching schema in PostGIS, then
 * writes the features into the target table.
 */
object GeomesaShpToPostGIS {

  /**
   * Entry point.
   *
   * @param args optional overrides: args(0) = shapefile directory path,
   *             args(1) = source feature type (layer) name. When omitted,
   *             the original hard-coded defaults are used.
   */
  def main(args: Array[String]): Unit = {
    // NOTE(review): defaults are developer-machine specific — pass args in real runs.
    val shpDirPath = if (args.length > 0) args(0) else "D:\\work\\bigdata\\TestShp"
    val typeName   = if (args.length > 1) args(1) else "1775a16a-e47c-4c70-bbcc-c75773862f7f_0"

    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("GeomesaShpToPostGIS")
      // Kryo with the GeoMesa registrator is required to serialize SimpleFeatures.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", classOf[GeoMesaSparkKryoRegistrator].getName)
    val sparkContext = SparkContext.getOrCreate(sparkConf)

    try {
      // Source: GeoTools shapefile data store, addressed by directory URL.
      // (Do NOT name a local variable `File` — it shadows java.io.File.)
      val shpDir = new File(shpDirPath)
      val readParams: Map[String, String] = Map(
        "url" -> shpDir.toURI.toURL.toString,
        "geotools" -> "true")
      val query = new Query(typeName)
      val rdd = GeoMesaSpark(readParams).rdd(new Configuration(), sparkContext, readParams, query)

      // Sink: PostGIS connection parameters — fill in host/user/passwd/database before running.
      val saveParams: Map[String, String] = Map(
        "geotools" -> "true",
        "dbtype" -> "postgis",
        "host" -> "",
        "user" -> "",
        "passwd" -> "",
        "port" -> "5432",
        "database" -> "")

      // DataStoreFinder returns null when no provider matches the parameters —
      // fail fast with a clear message instead of a later NullPointerException.
      val dataStore: DataStore = DataStoreFinder.getDataStore(saveParams)
      if (dataStore == null) {
        throw new IllegalStateException(
          "Could not create PostGIS DataStore - check connection parameters")
      }
      try {
        // Create the target schema in PostGIS from the shapefile's feature type,
        // renamed to the destination table name.
        val typeBuilder = new SimpleFeatureTypeBuilder
        typeBuilder.init(rdd.schema)
        typeBuilder.setName("Hello")
        dataStore.createSchema(typeBuilder.buildFeatureType)
      } finally {
        // Release JDBC resources held by the store (it was only needed for createSchema;
        // GeoMesaSpark.save opens its own store from saveParams).
        dataStore.dispose()
      }

      // Write the RDD into the newly created "Hello" table.
      GeoMesaSpark(saveParams).save(rdd, saveParams, "Hello")
    } finally {
      sparkContext.stop()
    }
  }
}
