package com.hsj;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

/**
 * Description: pair RDD join operations.
 * Author: hansh
 * Created: 2018/11/1 15:29
 * Version: 1.0
 * Modified by:
 * Modified date:
 */
/**
 * Demonstrates pair-RDD join operations: {@code join}, {@code leftOuterJoin},
 * {@code rightOuterJoin} and {@code fullOuterJoin}, plus {@code sortByKey}
 * and {@code collectAsMap}, printing each result to stdout.
 */
public class BasicJoin {

    /**
     * Entry point.
     *
     * @param args optional; {@code args[0]} is the Spark master URL
     *             (defaults to {@code "local"} when absent)
     */
    public static void main(String[] args) {
        String master = args.length > 0 ? args[0] : "local";

        // JavaSparkContext implements Closeable; try-with-resources guarantees
        // the context is stopped even if an action below throws.
        try (JavaSparkContext sc = new JavaSparkContext(
                master, "BasicJoin", System.getenv("SPARK_HOME"), System.getenv("JARS"))) {

            // Diamond operator keeps the pairs type-safe; the original used raw
            // Tuple2, which compiles only with unchecked warnings.
            JavaPairRDD<Integer, Integer> rdd1 = sc.parallelizePairs(Arrays.asList(
                    new Tuple2<>(1, 2), new Tuple2<>(3, 4), new Tuple2<>(3, 6)));
            JavaPairRDD<Integer, Integer> rdd2 = sc.parallelizePairs(Arrays.asList(
                    new Tuple2<>(1, 5), new Tuple2<>(4, 7), new Tuple2<>(3, 8), new Tuple2<>(3, 9)));

            System.out.println(rdd2.collect());
            System.out.println(rdd2.sortByKey(true).collect());  // ascending by key
            System.out.println(rdd2.sortByKey(false).collect()); // descending by key
            // NOTE(review): collectAsMap keeps a single value per key, so the
            // duplicate key 3 collapses to one entry.
            System.out.println(rdd2.collectAsMap());

            // Inner join: only keys present in both RDDs (here: 1 and 3).
            System.out.println("join result is :" + rdd1.join(rdd2).collect());
            // Left outer join: every key of rdd1 is kept.
            System.out.println("leftOuterJoin result is :" + rdd1.leftOuterJoin(rdd2).collect());
            // Right outer join: every key of rdd2 is kept.
            System.out.println("rightOuterJoin result is :" + rdd1.rightOuterJoin(rdd2).collect());
            // Full outer join: union of keys from both sides.
            System.out.println("fullOuterJoin result is :" + rdd1.fullOuterJoin(rdd2).collect());
        }
    }
}
