package com.dongline.te
import javafx.application.Application
import javafx.stage.Stage
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
/** Demonstrates an inner join on two small in-memory pair RDDs.
  *
  * Each string RDD is mapped to `(key, 1)` pairs; `join` keeps only keys
  * present in both sides (here "w" and "e"), producing `(key, (1, 1))`.
  *
  * NOTE(review): extending JavaFX [[Application]] for a Spark batch demo is
  * unusual — a plain `main` would suffice; kept to preserve the interface.
  */
class JoinOpt extends Application {

  /** JavaFX entry point; `primaryStage` is unused by this demo.
    *
    * @param primaryStage the primary JavaFX stage (never shown)
    */
  override def start(primaryStage: Stage): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    // Set Spark's test memory so local mode starts despite the default minimum-memory check.
    conf.set("spark.testing.memory", "471859200")
    val sc = new SparkContext(conf)
    try {
      val rdd1 = sc.parallelize(Array("q", "w", "e", "r"))
      val rdd2 = sc.parallelize(Array("a", "w", "e", "b"))
      // Inner join on the string keys; only "w" and "e" survive.
      val joined = rdd1.map((_, 1)).join(rdd2.map((_, 1)))
      print(joined.collect.toBuffer)
    } finally {
      // Original leaked the SparkContext; always release it.
      sc.stop()
    }
  }
}
