package com.shujia.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo9Join {

  /** Keys each CSV line by its first field (the id), yielding (id, fullLine). */
  private def keyByFirstField(rdd: RDD[String]): RDD[(String, String)] =
    rdd.map { line =>
      val id = line.split(",")(0)
      (id, line)
    }

  def main(args: Array[String]): Unit = {

    /**
      * join: only available on key-value RDDs. Pairs from both sides
      * that share the same key are combined into (key, (left, right)).
      */
    // Spark configuration; app name was "map" (copy-paste leftover from
    // another demo) — renamed to match what this job actually does.
    val conf = new SparkConf().setAppName("join").setMaster("local")

    // Entry point for all RDD operations.
    val sc = new SparkContext(conf)

    // NOTE(review): relative paths — assumes the working directory contains
    // spark/data; confirm against how the demo is launched.
    val studentRDD = sc.textFile("spark/data/students.txt")
    val scoreRDD = sc.textFile("spark/data/score.txt")

    // Key both datasets by student id (first CSV column) so they can be joined.
    val studentKvRDD: RDD[(String, String)] = keyByFirstField(studentRDD)
    val scoreKvRDD: RDD[(String, String)] = keyByFirstField(scoreRDD)

    // Inner join on student id: (id, (studentLine, scoreLine)).
    val joinRDD: RDD[(String, (String, String))] = studentKvRDD.join(scoreKvRDD)

    joinRDD.foreach(println)

    // Release Spark resources (original demo never stopped the context).
    sc.stop()
  }
}
