package rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark RDD accumulator example: reads a CSV file, drops the header row and
 * any record whose 7th field (index 6, the "star" rating) is missing, and
 * uses a long accumulator to count how many records were dropped.
 */
object RDD_AccumulatorExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("RDD_AccumulatorExample")

    val sc = new SparkContext(conf)
    // Counts records removed because the star field is absent/blank.
    val starCounter = sc.longAccumulator("star_counter")

    val resultRDD: RDD[String] = sc
      .textFile("data/sample.csv")
      .filter(line => !line.startsWith("SEQ,")) // skip the header row
      // split with limit -1 so trailing empty fields are preserved;
      // the default split(",") drops them, which would make arr(6) throw
      // ArrayIndexOutOfBoundsException when the last column is empty.
      .map(line => line.split(",", -1))
      .filter(arr => {
        // Treat both short/malformed rows and blank star fields as missing.
        // (String.split never yields null elements, so no null check needed.)
        // NOTE(review): updating an accumulator inside a transformation can
        // over-count if Spark re-executes tasks (retries, recomputation);
        // for exact counts, update accumulators in an action instead.
        if (arr.length <= 6 || arr(6).trim.isEmpty) {
          starCounter.add(1)
          false
        } else true
      })
      .map(arr => arr.mkString(","))

    // count() is the action that triggers the job; the accumulator value is
    // only reliable after this action has completed.
    println(s"过滤后记录数${resultRDD.count()}")
    println(s"删除的星级字段缺失记录数是：${starCounter.value}")

    sc.stop()
  }
}
