package com.xiaoxu.spark_base.example.RDD

import org.apache.spark.{SparkConf, SparkContext}

object CreateRDD {

  // Local-mode SparkContext shared by all examples below;
  // "local[2]" runs the driver with two worker threads.
  private val conf: SparkConf = new SparkConf()
    .setAppName("CreateRDD")
    .setMaster("local[2]")
  private val sc = new SparkContext(conf)

  /**
    * Creates an RDD from a local Scala collection.
    *
    * `parallelize` distributes the local collection to form an RDD; each
    * element is then incremented by one and the results are printed.
    * `collect()` returns an Array containing all elements of the RDD,
    * so it must only be used when the data fits in driver memory.
    */
  def map1(): Unit = {
    val data = Array(1, 2, 3, 4, 5)
    // Distribute the local collection across the cluster as an RDD.
    val rdd = sc.parallelize(data)
    val mapRdd = rdd.map(_ + 1)
    // Pull every element back to the driver.
    val array = mapRdd.collect()
    array.foreach(println)
  }

  /**
    * Creates an RDD from an external data source (a text file) and
    * prints the length of each line.
    */
  def map2(): Unit = {
    val rdd = sc.textFile("data/practiceOperator/people.txt")
    val mapRdd = rdd.map(_.length)
    val array = mapRdd.collect()
    array.foreach(println)
  }

  /**
    * Sums the lengths of all lines in the input file using `reduce`.
    *
    * NOTE(review): `reduce` throws on an empty RDD; acceptable for this
    * example, but prefer `fold`/`aggregate` with a zero value in
    * production code.
    */
  def reduce(): Unit = {
    val rdd = sc.textFile("data/practiceOperator/people.txt")
    val mapRdd = rdd.map(_.length) // e.g. 11, 8, 10
    val reduceResult = mapRdd.reduce(_ + _)
    println(reduceResult)
  }

  def main(args: Array[String]): Unit = {
    // try/finally guarantees the SparkContext is released even if the
    // example throws (e.g. the input file is missing).
    try {
      reduce()
    } finally {
      sc.stop()
    }
  }
}
