package cn.doitedu.day03

import cn.doitedu.day01.utils.SparkUtil
import cn.doitedu.day03.beans.Student
import org.apache.spark.rdd.RDD

/**
 * @Date 22.3.31
 * @Created by HANGGE
 * @Description Demonstrates the Spark `map` transformation: splitting CSV lines
 *              and reshaping each record into arrays, case classes, tuples and pairs.
 */
object C01_转换算子_Map {
  def main(args: Array[String]): Unit = {
    // Obtain the SparkContext (project helper).
    val sc = SparkUtil.getSc
    // Load the source file; each RDD element is one CSV line.
    val rdd1 = sc.textFile("data/students.csv")

    // `map` is a transformation — lazy, nothing executes until an action runs.
    // 1) Split every line into its fields once, and reuse the result below
    //    instead of re-splitting the same line in each derived RDD.
    val rdd2: RDD[Array[String]] = rdd1.map(_.split(","))

    // 2) Wrap each record in a Student case class.
    //    Sample record: 7,七娃,15,M,99,doit30
    val res1: RDD[Student] = rdd2.map(arr =>
      Student(arr(0).toInt, arr(1), arr(2).toInt, arr(3), arr(4).toDouble, arr(5))
    )

    // 3) Represent each record as a 6-tuple.
    val res2: RDD[(Int, String, Int, String, Double, String)] = rdd2.map(arr =>
      (arr(0).toInt, arr(1), arr(2).toInt, arr(3), arr(4).toDouble, arr(5))
    )

    // 4) Keep only (name, score) pairs.
    val res3: RDD[(String, Double)] = rdd2.map(arr => (arr(1), arr(4).toDouble))

    // Actions — these trigger the actual computation of the lineage above.
    res1.foreach(println)
    res2.foreach(println)
    res3.foreach(println)

    // Release resources.
    sc.stop()
  }

}
