package com.cluster.user.config

import org.apache.spark.{SparkConf, SparkContext}

object FieldJoinExample {

  /** Demonstrates a left outer join between two pair RDDs, projecting
    * one field from each side and printing the combined rows.
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("FieldJoinExample").setMaster("local")
    val sparkContext = new SparkContext(sparkConf)

    // Left-side sample data: (key, leftField1, leftField2).
    val leftRows = sparkContext.parallelize(Seq(
      ("www.baidu.com", 1, 100),
      ("www.zhihu.com", 2, 200),
      ("www.haha.com", 3, 300)
    ))

    // Right-side sample data: (key, rightField). Only "www.baidu.com"
    // overlaps with the left side, so the other left keys join to None.
    val rightRows = sparkContext.parallelize(Seq(
      ("www.baidu.com", 1000),
      ("www.huxiu.com", 2000),
      ("www.36ke.com", 3000)
    ))

    // Key the left rows by URL, carrying both payload fields as a tuple,
    // then left-outer-join against the right rows keyed the same way.
    val joined = leftRows
      .map { case (url, f1, f2) => (url, (f1, f2)) }
      .leftOuterJoin(rightRows.map { case (url, rf) => (url, rf) })

    // Project down to (key, leftField2, rightField), defaulting the right
    // field to 0 when the right table has no matching key.
    val projected = joined.map { case (url, ((_, leftField2), maybeRight)) =>
      (url, leftField2, maybeRight.getOrElse(0))
    }

    // Materialize on the driver and print each joined row.
    projected.collect().foreach { case (key, leftField2, rightField) =>
      println(s"key: $key, leftField2: $leftField2, rightField: $rightField")
    }

    sparkContext.stop()
  }
}
