

import java.util.Random

import com.vividsolutions.jts.geom.{Coordinate, Envelope, GeometryFactory}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}
import org.datasyslab.geospark.spatialOperator.RangeQuery
import org.datasyslab.geospark.spatialRDD.RectangleRDD

import scala.util.control.Breaks


/**
  * Created by Dell on 2016/12/9 0009.
  */
object Main {

  /**
    * Entry point: loads a rectangle RDD from a local CSV file and runs the
    * same spatial range query ten times against a randomly shifted envelope
    * (e.g. for timing/benchmarking the query path).
    *
    * NOTE(review): per the original comments, reading local/remote files
    * failed when submitting to the remote Spark cluster, so the master is
    * forced to "local".
    */
  def main(args: Array[String]): Unit = {
    // Silence noisy framework logging so query output stays readable.
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("akka").setLevel(Level.OFF)

    // Must run with a local master — remote file paths were failing.
    // The duplicate .set("spark.executor.memory", "64g") call was removed;
    // setting the same key twice with the same value had no effect.
    val sparkConf = new SparkConf()
      .setAppName("spatialSpark")
      .setMaster("local")
      .setJars(List("C:\\Users\\Dell\\IdeaProjects\\sparkSpatial\\out\\artifacts\\sparkSpatial_jar\\sparkSpatial.jar"))
      .set("spark.executor.memory", "64g")
    val sc = new SparkContext(sparkConf)

    try {
      // Random offset in [0, 11): integer part from nextInt(10), fractional
      // part from nextDouble(). Shifts every edge of the base envelope
      // (-90.01, -80.01, 30.01, 40.01) by the same amount, so the envelope's
      // size is preserved. The original built an unused fixed envelope first
      // and reassigned a var; a single val suffices.
      val random = new Random()
      val offset = random.nextInt(10) + random.nextDouble()
      val queryEnvelope =
        new Envelope(-90.01 + offset, -80.01 + offset, 30.01 + offset, 40.01 + offset)

      // Load rectangles from the local CSV; geometry columns start at offset 0.
      val objectRDD = new RectangleRDD(sc, "file:\\\\\\D:\\zcta510-small.csv", 0, "csv")
      // Cache the raw RDD so the 10 repeated queries don't re-read the file.
      objectRDD.rawRectangleRDD.persist(StorageLevel.MEMORY_ONLY)

      // Run the range query 10 times. The original wrapped this loop in
      // Breaks.breakable but never called break(), so the wrapper (and the
      // unused GeometryFactory/queryPoint locals) were removed.
      for (_ <- 1 to 10) {
        RangeQuery.SpatialRangeQuery(objectRDD, queryEnvelope, 0).getRawRectangleRDD().count()
      }
    } finally {
      // Release cluster resources even if a query throws.
      sc.stop()
    }
  }
}
