package com.wangwg.sparkTest.hdfs

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SparkConf, SparkContext, TaskContext}
import org.geotools.data.{DataStoreFinder, Query, Transaction}
import org.locationtech.geomesa.fs.data.{FileSystemDataStore, FileSystemDataStoreFactory}
import org.locationtech.geomesa.spark.{GeoMesaSpark, GeoMesaSparkKryoRegistrator}

import scala.collection.JavaConversions._

object GeomesaHdfsRead {

  /**
   * Driver that reads features from a GeoMesa FileSystem (HDFS-backed) data
   * store as a Spark RDD and opens a feature writer against the same store.
   *
   * NOTE(review): the RDD and the feature writer are created but never
   * consumed here — this looks like work-in-progress scaffolding for an HDFS
   * read/write pipeline; confirm the intended end state before release.
   */
  def main(args: Array[String]): Unit = {
    // Kryo serialization with GeoMesa's registrator is required so that
    // SimpleFeatures can be shipped between Spark executors.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("GeomesaHdfsRead")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", classOf[GeoMesaSparkKryoRegistrator].getName)
    val sparkContext = SparkContext.getOrCreate(sparkConf)

    val shpName = "landuse"

    // Connection parameters for the FileSystem data store rooted in HDFS.
    // Immutable Map literal instead of the original `var` + `+=` accumulation.
    // (The implicit Scala->Java map conversion from the file's
    // JavaConversions import is what lets getDataStore accept this.)
    val outputParams: Map[String, String] = Map(
      "fs.path" -> s"hdfs://geomesa1.com:9000/data/$shpName"
    )

    // DataStoreFinder returns null when no factory matches the parameters;
    // fail fast with a clear message instead of a later NullPointerException
    // on first use.
    val dataStore = DataStoreFinder.getDataStore(outputParams).asInstanceOf[FileSystemDataStore]
    require(dataStore != null, s"No FileSystemDataStore could be created for params: $outputParams")

    try {
      // Query the stored feature type by its type name.
      val query = new Query("444f3279-cd46-48ae-a0f8-f15453f15c86_0")

      // Load the features as an RDD. Kept even though unused below, since
      // building it is presumably the point of this driver.
      val rdd = GeoMesaSpark(outputParams).rdd(new Configuration(), sparkContext, outputParams, query)

      // Open — and now reliably close — a writer against the target schema.
      // The original leaked this writer, plus an unused HDFS FileSystem
      // client, an unused Path, a discarded FileSystemDataStoreFactory
      // reference, and a driver-side TaskContext.getPartitionId() call
      // (which always yields 0 outside a task); all removed as dead code.
      val featureWriter = dataStore.getFeatureWriter(shpName, Transaction.AUTO_COMMIT)
      try {
        // TODO: write features from `rdd` via `featureWriter`.
      } finally {
        featureWriter.close()
      }
    } finally {
      // Release store resources and shut Spark down even if reading fails.
      dataStore.dispose()
      sparkContext.stop()
    }
  }
}
