package cn.doitedu.dwetl.utls

import org.apache.commons.lang3.RandomStringUtils
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel

import scala.collection.immutable

/**
 * Scratch/demo program exploring RDD and DataFrame caching behaviour:
 * it compares `cache()` (MEMORY_ONLY) with `persist(StorageLevel.MEMORY_ONLY_SER)`
 * on both an RDD and a DataFrame read from local test files.
 *
 * NOTE(review): this is experiment code — the input paths (`/sparktest/...`)
 * are hard-coded and must exist locally for the actions to succeed.
 */
object Hehe {

  def main(args: Array[String]): Unit = {

    // Local-mode session for experimentation; appName left empty as in the
    // original experiment (Spark will still assign an app id).
    val spark = SparkSession.builder().appName("").master("local").getOrCreate()
    val sc = spark.sparkContext

    // Ensure the session is always torn down, even if an action throws
    // (e.g. because the input path is missing). Previously the session
    // was leaked: it was never stopped.
    try {
      // --- RDD caching experiments -------------------------------------
      val rdd = sc.textFile("/sparktest/csv.txt")
      rdd.cache()       // default: StorageLevel.MEMORY_ONLY (deserialized)
      rdd.count()       // action to force materialization of the cache
      rdd.unpersist()   // drop the MEMORY_ONLY copy before re-persisting

      // Re-persist the same RDD in serialized form to compare memory footprint.
      rdd.persist(StorageLevel.MEMORY_ONLY_SER)
      rdd.count()

      // WARNING: this transformation, if ever triggered by an action, would
      // build a billion-element IndexedSeq per input line via `yield` and
      // OOM immediately. It is lazy and no action is applied to `rdd2`, so
      // it never executes — kept only as a declaration experiment.
      val rdd2 = rdd.flatMap(s => {
        val x: Seq[String] = for (i <- 1 to 1000000000) yield RandomStringUtils.random(10)
        x.sortBy(_.toUpperCase())
      })

      // --- DataFrame caching experiments -------------------------------
      val df = spark.read.csv("/sparktest/csv.txt")
      df.cache()              // default DataFrame storage: MEMORY_AND_DISK
      df.show(10, false)      // action to populate the cache

      // NOTE(review): persist() on an already-cached Dataset does not change
      // the storage level — Spark keeps the first level until unpersist().
      df.persist(StorageLevel.MEMORY_ONLY_SER)
      df.show(10, false)
      df.unpersist()

      val df2 = spark.read.csv("/sparktest/d2g.txt")
      df2.persist(StorageLevel.MEMORY_ONLY_SER)
      df2.show(10, false)
      df2.unpersist()         // release the cached copy before shutdown

      // Inspect whether serialized partitions are compressed (default: false).
      val str = spark.conf.get("spark.rdd.compress")
    } finally {
      spark.stop()            // fixes the session/resource leak
    }
  }

}
