package com.zjol

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2017/3/28.
  */
object helloSpark {

  // Configuration and context are created once and never reassigned,
  // so declare them as `val` rather than `var`.
  val conf = new SparkConf().setAppName("hello").setMaster("local")
  val sc = new SparkContext(conf)

  /**
    * Entry point: demonstrates `RDD.aggregate` computing a (sum, count)
    * pair over a single-partition RDD of 1..6.
    */
  def main(args: Array[String]): Unit = {
    try {
      // One partition, so the seqOp folds all six elements in a single task.
      val rddInt = sc.parallelize(List(1, 2, 3, 4, 5, 6), 1)

      // aggregate: seqOp accumulates (runningSum, runningCount) within a
      // partition; combOp merges the per-partition pairs. For 1..6 this
      // yields (21, 6) — sum and element count. (The previous comment
      // claimed "(15,5)", which was incorrect.)
      val rddAggr1: (Int, Int) =
        rddInt.aggregate((0, 0))(
          (acc, v) => (acc._1 + v, acc._2 + 1),
          (a, b) => (a._1 + b._1, a._2 + b._2)
        )
      println("====aggregate 1====:" + rddAggr1.toString()) // (21,6)
    } finally {
      // Always shut the context down so the local Spark runtime releases
      // its resources even if an action above throws.
      sc.stop()
    }
  }

  /**
    * Tour of common RDD transformations (map, filter, flatMap, distinct,
    * set operations) and actions (count, countByValue, reduce, fold).
    * Results are printed rather than returned.
    */
  def rddOp: Unit = {
    val rddInt: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 2, 5, 1))
    // NOTE(review): rddStr and rdd02 below are sample RDDs that this method
    // never uses; kept for the tutorial's sake.
    val rddStr: RDD[String] = sc.parallelize(Array("a", "b", "c", "d", "a", "b", "a"))
    val rddFile: RDD[String] = sc.textFile("D:\\spark\\rdd.txt")

    val rdd01: RDD[Int] = sc.makeRDD(List(1, 3, 5, 3))
    val rdd02: RDD[Int] = sc.makeRDD(List(2, 4, 5, 1))

    // Element-wise transformations.
    println(rddInt.map(x => x + 1).collect().mkString(","))
    println(rddInt.filter(x => x < 5).collect().mkString(","))

    // flatMap: split each line of the text file on commas, flattening
    // the per-line arrays into one RDD of tokens.
    println(rddFile.flatMap { x => x.split(",") }.collect().mkString("\n"))

    println(rddInt.distinct().collect().mkString(","))

    // Set-like operations between two RDDs.
    println(rddInt.union(rdd01).collect().mkString(","))
    println(rddInt.intersection(rdd01).collect().mkString(","))
    println(rddInt.subtract(rdd01).collect().mkString(","))
    println(rddInt.cartesian(rdd01).collect().mkString(","))

    // Actions.
    println(rddInt.count())
    println(rddInt.countByValue())
    println(rddInt.reduce((a, b) => a + b))
    // WARNING: fold's zero value (1 here) is applied once per partition AND
    // once more when merging partition results, so a non-neutral zero makes
    // the result depend on the partition count.
    println(rddInt.fold(1)((a, b) => a + b))
  }

}
