package com.suddev.bigdata.core

import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.{SparkConf, SparkContext}
import org.datasyslab.geospark.enums.FileDataSplitter
import org.datasyslab.geospark.serde.GeoSparkKryoRegistrator
import org.datasyslab.geospark.spatialRDD.PointRDD


/**
 * Minimal GeoSpark demo: loads a CSV of points into a `PointRDD`
 * and prints every raw record to stdout.
 */
object DemoApp {
  def main(args: Array[String]): Unit = {
    // Build the SparkConf; GeoSpark requires Kryo serialization with its
    // own registrator so spatial geometries serialize correctly.
    val conf = new SparkConf().
      setAppName("GeoSparkDemo1").
      setMaster("local[*]").
      set("spark.serializer", classOf[KryoSerializer].getName).
      set("spark.kryo.registrator", classOf[GeoSparkKryoRegistrator].getName)
    val sc = new SparkContext(conf)

    try {
      val pointRDDInputLocation = "data/checkin.csv"
      // Column offset of longitude/latitude in the input: they are in
      // columns 0 and 1 here, so the offset is 0.
      val pointRDDOffset = 0
      val pointRDDSplitter = FileDataSplitter.CSV
      // When true, columns beyond the coordinates are kept as user data
      // attached to each geometry.
      val carryOtherAttributes = true
      val objectRDD = new PointRDD(sc, pointRDDInputLocation, pointRDDOffset, pointRDDSplitter, carryOtherAttributes)
      // Collect the raw (un-indexed, un-partitioned) RDD to the driver
      // and print each point. Fine for a demo; avoid collect() on big data.
      objectRDD.rawSpatialRDD.rdd.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails,
      // so local executor threads and the UI port are cleaned up.
      sc.stop()
    }
  }
}
