package com.sugon.ww

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.log4j.{Level, Logger}
import org.apache.spark.storage.StorageLevel


object Oper5 {

  /**
   * Demo entry point: squares a small RDD of ints, persists the result to
   * disk, then prints its element count and its contents.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    // Uncomment locally to silence Spark's verbose INFO logging.
    // Logger.getLogger("org").setLevel(Level.ERROR)

    // master("local[*]") is required when running outside a cluster (local mode).
    // NOTE: getSimpleName on a Scala `object` ends with '$' (e.g. "Oper5$");
    // strip it so the Spark UI shows a clean application name.
    val session: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName(this.getClass.getSimpleName.stripSuffix("$"))
      .getOrCreate()

    try {
      val value: RDD[Int] = session.sparkContext.parallelize(List(2, 2, 3, 4, 3, 2))

      val result = value.map(x => x * x)

      // Persist BEFORE the two actions below so the map is computed once
      // instead of being re-evaluated for both count() and collect().
      result.persist(StorageLevel.DISK_ONLY)

      println(result.count())
      println(result.collect().mkString(","))

      // Release the cached disk blocks explicitly once we are done with the RDD.
      result.unpersist()
    } finally {
      // Ensure the SparkSession is stopped even if an action above throws.
      session.close()
    }
  }
}
