package com.neo.base.C03_program

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

import com.typesafe.scalalogging.slf4j.Logger
import org.slf4j.LoggerFactory


class C03_05_RddBaseOpera {

  // Logger named after this class; used to trace RDD operation results.
  val logger = Logger(LoggerFactory.getLogger("C03_05_RddBaseOpera"))

  /**
   * Demonstrates basic RDD transformations (`map`, `flatMap`, `distinct`,
   * `cartesian`) and actions (`take`, `first`, `reduce`) on small in-memory
   * collections, printing each result to stdout.
   *
   * Runs Spark locally with 8 threads. The SparkContext is stopped in a
   * `finally` block so its resources are released even if an operation fails.
   */
  def flatMapTest(): Unit = {
    val conf = new SparkConf().setMaster("local[8]").setAppName("flatMapTest")
    val sc = new SparkContext(conf)
    try {
      val lines = sc.parallelize(List("hello world", "hi"))

      // map: one output element per input ("hello worldo", "hio")
      val words1 = lines.map(s => s + "o")
      words1.take(2).foreach(println)

      // flatMap: each line is split on spaces and the pieces are flattened
      // into a single RDD of words.
      val words = lines.flatMap(line => line.split(" "))
      words.take(2).foreach(println)

      logger.info(words.first())

      val intcounts = sc.parallelize(List(1, 2, 3, 3))

      // flatMap producing ranges: 1 -> 1,2,3; 2 -> 2,3; 3 -> 3; 3 -> 3
      val intcount1 = intcounts.flatMap(a => a to 3)
      intcount1.take(10).foreach(println)

      // distinct removes the duplicated 3 (triggers a shuffle).
      val intcount2 = intcounts.distinct()
      println("----------------------------")
      intcount2.take(10).foreach(println)

      logger.info("笛卡尔集")
      val a1 = sc.parallelize(List(1, 2, 3))
      val b1 = sc.parallelize(List(5, 6, 7))

      // cartesian: all 3 * 3 = 9 pairs (a, b).
      val a1b1 = a1.cartesian(b1)
      a1b1.take(100).foreach(println)

      logger.info("行动操作")

      // reduce action: sums the elements of a1 (1 + 2 + 3 = 6).
      val c1 = a1.reduce((x, y) => x + y)
      println(c1)
    } finally {
      // Fix: the original never stopped the SparkContext, leaking its
      // threads, listener bus, and UI server for the lifetime of the JVM.
      sc.stop()
    }
  }

}

object C03_05_RddBaseOpera {

  /** Entry point: constructs the demo class and runs the RDD examples. */
  def main(args: Array[String]): Unit =
    new C03_05_RddBaseOpera().flatMapTest()

}
