import org.apache.spark.SparkContext
import org.apache.spark.SparkConf


object LinkWithSpark {

  // Run locally on all available cores; the app name appears in the Spark UI.
  val conf = new SparkConf().setAppName("LinkWithSpark").setMaster("local[*]")
  val sc: SparkContext = new SparkContext(conf)
  // Distribute a small local array as an RDD and sum its elements with a reduce.
  def sumOfArray(): Unit = {
    val data = Array(1, 2, 3, 4, 5)
    val distData = sc.parallelize(data)              // create an RDD from the local collection
    val summed: Int = distData.reduce((a, b) => a + b)
    println(summed)
  }
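
  // A minimal alternative sketch (the helper name sumWithBuiltin is just for illustration):
  // for numeric RDDs, Spark also exposes sum() via DoubleRDDFunctions, which returns a Double.
  // Shown only for comparison with the explicit reduce above.
  def sumWithBuiltin(): Unit = {
    val distData = sc.parallelize(Array(1, 2, 3, 4, 5))
    println(distData.sum())
  }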

  def main(args: Array[String]): Unit = {
    sumOfArray()
    sc.stop()                                        // release the SparkContext's resources before exiting
  }
}
