package com.shujia.opt

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.storage.StorageLevel




object Code02Serializable {

  /**
   * Demo of caching an RDD of custom objects with a serialized storage level.
   *
   * Reads students.txt (CSV: id,name,age,gender,clazz), parses each line into
   * a [[SerStu]], persists the RDD serialized (MEMORY_AND_DISK_SER), then
   * counts students per class and prints the result.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      // Optional: switch to Kryo serialization (smaller/faster than Java
      // serialization) and register classes via a custom registrator.
//      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
//      .set("spark.kryo.registrator", "com.shujia.opt.MySerializable")
      .setAppName("Mysql2Text")
    val sc = new SparkContext(conf)

    val mapRes: RDD[SerStu] = sc
      .textFile("spark_code/data/students.txt")
      .map { oneLine =>
        // Plain lambda: no `case` wrapper needed, and the intermediate tuple
        // the original built and discarded here is removed (dead code).
        val fields: Array[String] = oneLine.split(",")
        SerStu(fields(0), fields(1), fields(2).toInt, fields(3), fields(4))
      }

    // Persist serialized: saves heap at the cost of (de)serialization CPU;
    // partitions that do not fit in memory spill to disk.
    mapRes.persist(StorageLevel.MEMORY_AND_DISK_SER)

    mapRes
      .groupBy(_.clazz)
      .mapValues(_.size)
      .foreach(println)

    // Keep the application alive so the Spark web UI remains reachable.
    // Sleep instead of busy-spinning so we do not peg a CPU core at 100%.
    while (true) {
      Thread.sleep(1000L)
    }
  }
}

case class SerStu(id:String,name:String,age:Int,gender:String,clazz:String)

/**
 * Plain (non-case) student class used to demonstrate serialization issues.
 *
 * The original version did not extend Serializable, which made Spark fail
 * with: "object not serializable (class: com.shujia.opt.SerStu2, ...)"
 * whenever an instance was created inside an RDD transformation — a class
 * must be serializable before its instances can be shipped to executors.
 * Mixing in Serializable (an alias for java.io.Serializable) fixes that
 * while keeping the public `_field` vars unchanged for existing callers.
 */
class SerStu2(id: String, name: String, age: Int, gender: String, clazz: String) extends Serializable {
  var _id: String = id
  var _name: String = name
  var _age: Int = age
  var _gender: String = gender
  var _clazz: String = clazz
}
