package com.catmiao.rdd.operate;

import com.google.common.collect.Lists;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple1;
import scala.Tuple2;

/**
 * @author ChengMiao
 * @title: Spark01_Operate
 * @projectName spark_study
 * @description: demonstrates the two categories of data an RDD processes (single-value vs. key-value/Tuple2)
 * @date 2024/11/25 15:45
 */
public class Spark01_Operate {

    public static void main(String[] args) {
        final SparkConf conf = new SparkConf();
        conf.setAppName("spark");
        conf.setMaster("local[*]");

        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is shut down even if an exception is thrown mid-job
        // (the original bare sc.close() would be skipped on any throw).
        try (final JavaSparkContext sc = new JavaSparkContext(conf)) {

            // Distribute a small in-memory list across the local executors.
            JavaRDD<Integer> rdd = sc.parallelize(Lists.newArrayList(1, 2, 3));

            /*
             * Categories of data an RDD can process:
             * 1. Single-value types (e.g. the Integer RDD above)
             * 2. Key-value types
             *    - Java (JDK 1.8+) has no built-in tuple type; Spark relies on
             *      Scala's special TupleX classes (Tuple2, Tuple3, ...).
             */
            Tuple2<String, Integer> tuple = new Tuple2<>("a", 20);
        }
    }
}
