package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Code36RDDInRDD {

  /**
   * Demonstrates the rule that an RDD must NOT be referenced inside another
   * RDD's operator: transformations and actions can only be invoked by the
   * driver, never from within a closure running on an executor. Nesting
   * `listRDD1.map(...).collect()` inside `listRDD2.map` would throw a
   * SparkException at runtime.
   *
   * The correct pattern (used below) is to materialize the inner RDD on the
   * driver first (`collect()`, or `sc.broadcast` for large data) and close
   * over the resulting local collection.
   *
   * Note: an RDD *variable* may be reassigned to another RDD of the same
   * type, e.g.:
   * {{{
   * lastPrRes = currentPRRes.map {
   *   case (page, (pr, outPageList)) => (page, outPageList, pr)
   * }
   * }}}
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("LogDataCompute")
    val sc = new SparkContext(conf)

    val listRDD1: RDD[Int] = sc.parallelize(
      List(1, 2, 3, 4)
    )

    val listRDD2: RDD[Int] = sc.parallelize(
      List(7, 8, 9, 10)
    )

    // Materialize listRDD1 on the driver. A plain local List serializes
    // safely into the closure below, whereas referencing the RDD itself
    // inside `map` would fail ("RDD transformations and actions can only
    // be invoked by the driver, not inside of other transformations").
    val localList1: List[Int] = listRDD1.collect().toList

    listRDD2
      .map(num1 => localList1.map(num2 => num1 * num2)) // per element: list of pairwise products
      .foreach(println)

    // Release the SparkContext's resources before the JVM exits.
    sc.stop()
  }
}
