package com.wangwg.sparkTest

import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, SparkContext}
import org.geotools.data.{DataStoreFinder, Query}
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.spark.GeoMesaSpark

import scala.collection.JavaConversions._

object GeomesaReadTable {

  /**
   * Reads every feature of the "TestPoint2" feature type from the GeoMesa
   * "Test" catalog in HBase through a GeoMesa Spark RDD, printing the schema
   * and each record to stdout.
   *
   * Fixes vs. the original:
   *  - the HBase data store and the SparkContext are now released in a
   *    `finally` block, so they are no longer leaked when `collect()` (or any
   *    earlier call) throws;
   *  - redundant trailing semicolons removed (non-idiomatic Scala).
   */
  def main(args: Array[String]): Unit = {
    // Local master for testing; the commented line is the cluster deployment URL.
    //val sparkConf = new SparkConf().setMaster("spark://geomesa1.com:7077").setAppName("GeomesaReadTable")
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("GeomesaReadTable")
    val sparkContext = SparkContext.getOrCreate(sparkConf)

    // Connection parameters for the GeoMesa HBase data store.
    // NOTE(review): the file-level `JavaConversions` import (deprecated) is what
    // implicitly adapts this Scala Map for the Java-based GeoTools/GeoMesa APIs
    // below — migrate to explicit `JavaConverters`/`.asJava` when convenient.
    val params = Map(
      "hbase.zookeepers" -> "geomesa1.com:2181,geomesa2.com:2181,geomesa3.com:2181",
      "hbase.catalog"    -> "Test"
    )

    // Unfiltered query: fetch every feature of the "TestPoint2" type.
    val query = new Query("TestPoint2")
    val spatialRDDProvider = GeoMesaSpark(params)
    val hBaseDataStore = DataStoreFinder.getDataStore(params).asInstanceOf[HBaseDataStore]
    try {
      val rdd = spatialRDDProvider.rdd(new Configuration(), sparkContext, params, query)
      println(rdd.schema)
      rdd.collect().foreach(record => println(record))
    } finally {
      // Release external resources even if the Spark job fails.
      println("close")
      hBaseDataStore.dispose()
      sparkContext.stop()
    }
  }
}
