package com.weiron.rdd.demo;

import com.weiron.rdd.entity.User;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapPartitionsFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Demo of RDD-related APIs.
 * Pipeline context:
 * 1. The data-analysis platform exposes an interface for other systems to write logs.
 * 2. Flume collects the logs, using Kafka as the sink.
 * 3. Spark analyses the logs and writes the results to HBase.
 *
 * @author weiRon
 * @since 2023/4/19 11:04
 */
public class RddDemo {
    /**
     * Runs a local word-count pipeline demonstrating basic RDD operations:
     * union, map, flatMap, filter, mapToPair, and reduceByKey.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(final String[] args) {
        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is stopped even if the job throws (it was leaked before).
        try (JavaSparkContext sc = new JavaSparkContext("local", "WordCount")) {
            List<String> dataList1 = new ArrayList<>();
            dataList1.add("Hello world of spark");
            dataList1.add("I need to learn it");
            dataList1.add("Spark is necessary");
            dataList1.add("I create a test file");
            JavaRDD<String> dataRDD1 = sc.parallelize(dataList1);

            List<String> dataList2 = new ArrayList<>();
            dataList2.add("testUnion");
            dataList2.add("I am trying");
            dataList2.add("another dataList");
            dataList2.add("I create a test file");
            JavaRDD<String> dataRDD2 = sc.parallelize(dataList2);

            // union: concatenate both RDDs (duplicates are kept)
            JavaRDD<String> dataRDD = dataRDD1.union(dataRDD2);
            // map: append a marker word to every line
            JavaRDD<String> mapRDD = dataRDD.map(s -> s + " end");
            // flatMap: split each line into individual words
            JavaRDD<String> flatMapRDD = mapRDD.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
            // filter: drop the standalone word "I"
            JavaRDD<String> filterRDD = flatMapRDD.filter(s -> !s.equals("I"));
            // pair each word with an initial count of 1
            // (typed Tuple2<> — the original raw `new Tuple2(s, 1)` caused an unchecked warning)
            JavaPairRDD<String, Integer> pairRDD = filterRDD.mapToPair(s -> new Tuple2<>(s, 1));
            // aggregate: sum the counts per word
            JavaPairRDD<String, Integer> reducePairRDD = pairRDD.reduceByKey(Integer::sum);
            System.out.println(reducePairRDD.collect());
        }
    }
}