package com.hngy.scala

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Purpose: observe Spark memory usage. Runs a simple job, then keeps the JVM
  * alive so the storage tab of the local web UI (port 4040) can be inspected.
  */
object TestMemoryScala {

  /**
    * Entry point: reads a text file from HDFS and triggers a `count()` action,
    * then keeps the JVM alive so the Spark web UI (http://localhost:4040) can
    * be inspected for storage/memory information.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName("TestMemoryScala")
      .setMaster("local")
    val sc = new SparkContext(conf)

    val dataRDD = sc.textFile("hdfs://hadoop001:9001/word.txt")
    // count() is an action: it forces the job to actually run.
    dataRDD.count()

    // Keep the application alive so the local 4040 UI stays reachable.
    // Use a sleep rather than the original empty busy-wait (`while (true) { ; }`),
    // which pins a CPU core at 100% and skews the very resource-usage
    // observations this test is meant to enable.
    try {
      Thread.sleep(Long.MaxValue)
    } finally {
      // Not reached in a normal run (the sleep effectively never returns), but
      // releases the SparkContext cleanly if the sleep is interrupted. The
      // original code placed sc.stop() after the infinite loop, where it was
      // unreachable dead code.
      sc.stop()
    }
  }
}
