package com.zyx.sparkdemo.core

import java.util.concurrent.ConcurrentHashMap

import org.apache.spark.util.{AccumulatorV2, LongAccumulator}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

object Test {

  /**
   * Demo entry point: broadcasts a read-only range of odd numbers
   * (1, 3, 5, ..., 999) to all executors, then filters a small RDD
   * against it. With the data above this prints 1 and 3.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("test")
    val sc = new SparkContext(sparkConf)

    val list = List(1 ,2 ,3, 4)
    // Broadcast the lookup set once instead of capturing it in every task closure.
    val bc = sc.broadcast(1 to(1000, 2))
    sc.makeRDD(list, 2).filter(x => bc.value.contains(x)).collect.foreach(println)

    sc.stop()
  }


  /**
   * Word-count accumulator: accumulates per-word occurrence counts into a
   * [[ConcurrentHashMap]]. Each `add(word)` increments that word's count by one;
   * `merge` sums the counts of two partial results key-by-key.
   *
   * NOTE(review): previously every method was an unimplemented `???` stub,
   * which would throw `NotImplementedError` as soon as Spark registered or
   * used the accumulator.
   */
  class wcAccum extends AccumulatorV2[String, ConcurrentHashMap[String, Long]] {

    private var result: ConcurrentHashMap[String, Long] = new ConcurrentHashMap()

    /** True when no word has been counted yet (the zero of this accumulator). */
    override def isZero: Boolean = result.isEmpty

    /** Independent deep copy; Spark calls this when shipping the accumulator to tasks. */
    override def copy(): AccumulatorV2[String, ConcurrentHashMap[String, Long]] = {
      val copied = new wcAccum
      copied.result.putAll(result)
      copied
    }

    /** Drop all counts, returning the accumulator to its zero state. */
    override def reset(): Unit = result.clear()

    /** Increment the count for `v` by one, atomically per key. */
    override def add(v: String): Unit =
      result.merge(v, 1L, (a: Long, b: Long) => a + b)

    /** Fold another partial result into this one by summing counts per word. */
    override def merge(other: AccumulatorV2[String, ConcurrentHashMap[String, Long]]): Unit = {
      val it = other.value.entrySet().iterator()
      while (it.hasNext) {
        val e = it.next()
        result.merge(e.getKey, e.getValue, (a: Long, b: Long) => a + b)
      }
    }

    /** Current word -> count map (live view, not a defensive copy). */
    override def value: ConcurrentHashMap[String, Long] = result
  }

}
