package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo3Base {

  /**
   * Demonstrates basic Spark RDD transformations (map, filter, flatMap).
   *
   * Transformations are lazy: nothing runs until an action (here
   * `foreach`) triggers a job, at which point records flow through the
   * whole pipeline one at a time — the interleaved `println` markers
   * make that evaluation order visible.
   */
  def main(args: Array[String]): Unit = {
    // 1. Build the Spark environment.
    val conf = new SparkConf()
    // "local" runs single-threaded; use "local[4]" for 4 cores.
    conf.setMaster("local")
    conf.setAppName("base")
    val sc = new SparkContext(conf)

    try {
      val linesRDD: RDD[String] = sc.textFile("data/students.txt")

      // Record layout of data/students.txt: id,name,age,sex,clazz
      case class Student(id: String, name: String, age: Int, sex: String, clazz: String)

      println("1")

      // 1. map: one input row -> one output row.
      // NOTE(review): the partial function throws scala.MatchError on any
      // row that does not split into exactly 5 fields, and `age.toInt`
      // throws NumberFormatException on a non-numeric age — assumes the
      // input file is well-formed.
      val studentsRDD: RDD[Student] = linesRDD
        .map(_.split(","))
        .map {
          case Array(id, name, age, sex, clazz) =>
            println("map")
            Student(id, name, age.toInt, sex, clazz)
        }

      println("2")

      // 2. filter: keep records for which the predicate returns true;
      // records where it returns false are dropped.
      val filterRDD: RDD[Student] = studentsRDD
        .filter(student => {
          println("filter")
          student.sex == "女" && student.age > 22
        })

      println("3")

      // foreach is an action: it forces execution of the lazy pipeline.
      filterRDD.foreach(println)

      println("4")

      // 3. flatMap: one input row -> zero or more output rows.
      val linesWordRDD: RDD[String] = sc.textFile("data/lines.txt")
      val wordsRDD: RDD[String] = linesWordRDD
        .flatMap(line => line.split(","))
      wordsRDD.foreach(println)
    } finally {
      // Fix: the original leaked the SparkContext; stop it so the
      // application shuts down cleanly even if a job fails.
      sc.stop()
    }
  }
}