package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo9Join {

  def main(args: Array[String]): Unit = {

    // Local-mode Spark context for this demo.
    val conf: SparkConf = new SparkConf()
      .setAppName("join")
      .setMaster("local")

    val sc = new SparkContext(conf)

    val students: RDD[String] = sc.textFile("data/students1.txt")
    val score: RDD[String] = sc.textFile("data/score1.txt")

    // Convert each line into a key/value pair keyed by the first
    // comma-separated field (the id), keeping the whole line as the value.
    // A plain lambda is used: the RDD is already RDD[String], so the former
    // `case stu: String =>` type test was redundant.
    val kvStudent: RDD[(String, String)] = students.map { stu =>
      val fields: Array[String] = stu.split(",")
      (fields(0), stu)
    }

    val kvScore: RDD[(String, String)] = score.map { sco =>
      val fields: Array[String] = sco.split(",")
      (fields(0), sco)
    }

    /**
      * join: inner join by default — only keys present on BOTH sides
      * are kept; values are paired per key.
      */
    val innerJoinRDD: RDD[(String, (String, String))] = kvStudent.join(kvScore)

    //innerJoinRDD.saveAsTextFile("data/2")

    /**
      * leftOuterJoin: every key from the left side is kept; the right-hand
      * value is an Option — None (not null) when there is no matching key.
      */
    val leftJoinRDD: RDD[(String, (String, Option[String]))] = kvStudent.leftOuterJoin(kvScore)

    val leftOuterResultRDD = leftJoinRDD.map {
      case (_, (studentInfo, scoreOpt)) =>
        // getOrElse collapses the former Some/None match; "默认值" is the
        // placeholder emitted for students with no score row.
        studentInfo + "\t" + scoreOpt.getOrElse("默认值")
    }
    //leftOuterResultRDD.foreach(println)

    /**
      * fullOuterJoin: keeps keys from both sides; each side's value is an
      * Option, None where that side has no matching key.
      */
    val fullJoin: RDD[(String, (Option[String], Option[String]))] = kvStudent.fullOuterJoin(kvScore)

    fullJoin.foreach(println)

  }

}
