package com.ztjy.demo;

import com.google.common.collect.Lists;
import com.ztjy.dao.HiveDAO;
import com.ztjy.dao.MySparkSession;
import lombok.extern.log4j.Log4j2;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import scala.Tuple2;

import java.util.List;


@Log4j2
public class JavaRddDemo {

    /**
     * Demonstrates loading a Hive table as a {@code Dataset<Row>}, printing its
     * first rows, and pulling a small sample to the driver via the JavaRDD API.
     */
    public static void test() {
        Dataset<Row> userRelation = HiveDAO.getUserRelation();
        userRelation.show();
        // take(10) materializes up to 10 rows on the driver; log the sample
        // instead of silently discarding the result.
        List<Row> sample = userRelation.toJavaRDD().take(10);
        log.info(sample);
    }

    /**
     * Demonstrates an inner join between two pair RDDs built from small in-memory
     * lists. Only keys present in both sides ("3" here) survive the join; joining
     * in both directions shows the key set is the same while the value-tuple
     * order follows the left operand.
     */
    public static void testJoin() {
        JavaSparkContext context =
                JavaSparkContext.fromSparkContext(MySparkSession.getSparkSession().sparkContext());

        // Use one list per RDD — reusing a single mutable list across
        // parallelize() calls is error-prone.
        List<String> left = Lists.newArrayList("1", "2", "3");
        List<String> right = Lists.newArrayList("3", "4", "5");

        JavaRDD<String> rdd1 = context.parallelize(left);
        JavaRDD<String> rdd2 = context.parallelize(right);

        // No PairFunction cast needed — the lambda's target type is inferred
        // from mapToPair's signature.
        JavaPairRDD<String, String> pair1 = rdd1.mapToPair(s -> new Tuple2<>(s, s));
        JavaPairRDD<String, String> pair2 = rdd2.mapToPair(s -> new Tuple2<>(s, s));

        JavaPairRDD<String, Tuple2<String, String>> leftJoin = pair1.join(pair2);
        JavaPairRDD<String, Tuple2<String, String>> rightJoin = pair2.join(pair1);
        log.info(leftJoin.collect());
        log.info(rightJoin.collect());
    }
}
