package com.niit.spark.rdd.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Date: 2025/4/28
 * Author: Ys
 * Description: Exercise demonstrating the Spark RDD `map` transformation
 * by parsing "product,amount" records into (product, amount) pairs.
 */
object MapExercise {

  /**
   * Entry point: builds a local SparkContext, parses a small set of
   * "product,amount" sales records into (product, amount) pairs via
   * `map`, prints them, and shuts the context down.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("MapExercise")
    val sc = new SparkContext(conf)
    // Silence Spark's INFO/WARN noise so only the results are printed.
    sc.setLogLevel("ERROR")

    // Raw sales records in "product,amount" form.
    val salesRDD = sc.parallelize(Seq("ProductA,100.5", "ProductB,200.8",
      "ProductA,150.2", "ProductC,300.0"))

    // Split each record on the comma and convert the amount to Double.
    val pairs: RDD[(String, Double)] = salesRDD.map { record =>
      val fields = record.split(",")
      (fields(0), fields(1).toDouble)
    }

    pairs.collect().foreach(println)

    sc.stop()
  }
}
