package cn.spark.study.core;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.sql.SQLOutput;

/**
 * RDD persistence demo: runs {@code count()} on the same RDD twice and
 * prints the elapsed wall-clock time for each pass, showing the speed-up
 * that {@code cache()} provides on the second action.
 */
public class Persist {

    /**
     * Entry point.
     *
     * @param args optional; {@code args[0]} overrides the input file path,
     *             otherwise the original hard-coded sample path is used.
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("Persist").setMaster("local");

        // JavaSparkContext is Closeable; try-with-resources guarantees the
        // context is stopped even if an action throws (the original only
        // closed it on the happy path).
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Allow the input file to be supplied on the command line; fall
            // back to the original sample path for backward compatibility.
            String path = args.length > 0
                    ? args[0]
                    : "K:\\Spark从入门到精通（Scala编程、案例实战、高级特性、Spark内核源码剖析、Hadoop高端）\\第36讲-Spark核心编程：RDD持久化详解\\文档\\spark.txt";

            // cache() marks the RDD for in-memory persistence; the data is
            // actually materialized by the first action below.
            JavaRDD<String> lines = sc.textFile(path).cache();

            timedCount(lines); // first pass: reads the file and fills the cache
            timedCount(lines); // second pass: served from the cached partitions
        }
    }

    /** Runs count() on the RDD and prints the result and the elapsed time. */
    private static void timedCount(JavaRDD<String> lines) {
        long beginTime = System.currentTimeMillis();

        long count = lines.count();
        System.out.println("count = " + count);

        long endTime = System.currentTimeMillis();
        System.out.println("cost " + (endTime - beginTime) + " milliseconds");
    }

}
