package com.shujia.spark.opt

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel

object Demo6Kryo {

  /**
    * Demonstrates switching Spark's serializer to Kryo and caching an RDD in
    * serialized form (`MEMORY_ONLY_SER`), so the cached bytes are the compact
    * Kryo encoding rather than Java serialization.
    *
    * Reads `data/students.txt` (CSV: id,name,age,gender,clazz), parses each
    * line into a [[Student]], persists the RDD, then runs two actions — the
    * first materializes the cache, the second is served from it.
    */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("cache")
      .config("spark.sql.shuffle.partitions", 1)
      // Switch Spark's serialization from default Java serialization to Kryo.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Registrator class that registers our custom classes with Kryo
      // (registered classes serialize without writing the full class name).
      .config("spark.kryo.registrator", "com.shujia.spark.opt.Demo7KryoRegister")
      .getOrCreate()

    val sc: SparkContext = spark.sparkContext
    val linesRDD: RDD[String] = sc.textFile("data/students.txt")

    // Parse each CSV line into a Student instance.
    // NOTE(review): assumes every line has >= 5 fields and a numeric age;
    // malformed lines would throw at action time.
    val studentRDD: RDD[Student] = linesRDD.map { line =>
      val fields: Array[String] = line.split(",")
      Student(fields(0), fields(1), fields(2).toInt, fields(3), fields(4))
    }

    // Cache in serialized form; with Kryo enabled the stored payload is
    // smaller than the Java-serialized equivalent.
    studentRDD.persist(StorageLevel.MEMORY_ONLY_SER)

    // First action builds the cache; second action reads from it.
    studentRDD.foreach(println)
    studentRDD.foreach(println)

    // Keep the driver alive so the Spark web UI (default http://localhost:4040)
    // can be inspected. The original `while (true) {}` busy-spun a CPU core at
    // 100%; sleeping blocks the thread with the same effect at zero CPU cost.
    Thread.sleep(Long.MaxValue)
  }

  /** One record of data/students.txt: id, name, age, gender, class. */
  case class Student(id: String, name: String, age: Int, gender: String, clazz: String)

}
