package kevin.bigdata.spark.test2

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Counts words containing the letter "N" in an HDFS text file using Spark.
 *
 * @author kevin
 * @since 8:29 2021/4/30
 */
object SimpleApp {
  /**
   * Entry point: counts whitespace-separated tokens containing the letter "N"
   * in a text file and prints the count to stdout.
   *
   * @param args optional; `args(0)` overrides the default HDFS input path
   */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be supplied on the command line; fall back to
    // the original hard-coded HDFS location for backward compatibility.
    val logFile = args.headOption.getOrElse("hdfs://bigdata:9000/user/hadoop/merge.txt")
    val conf = new SparkConf()
      .setAppName("Simple Application")
      .setMaster("local[2]")
      // Raises the memory reported to Spark's minimum-memory sanity check;
      // presumably needed for a constrained local/test setup — TODO confirm.
      .set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(conf)
    try {
      // cache() because in principle the RDD could be reused; with a minimum
      // of 2 partitions as in the original code.
      val logData = sc.textFile(logFile, 2).cache()
      // Tokenize each line on single spaces, keep tokens containing "N".
      val num = logData.flatMap(_.split(" ")).filter(_.contains("N")).count()
      println(s"Words with N: $num")
    } finally {
      // Ensure the SparkContext (and its local executor threads) is released
      // even if reading or counting fails.
      sc.stop()
    }
  }
}
