package SparkStreaming;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.*;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.sql.DataFrameNaFunctions;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.streaming.*;
import org.apache.spark.streaming.api.java.*;

/*
 * Vocabulary notes (translated from Chinese): sufficient (ample, adequate),
 * unaware (unintentional), asynchronous.
 */
import java.util.Arrays;

/**
 * @program: MySpark
 * @description: Spark Streaming word count that registers each micro-batch as a
 * temporary SQL view and aggregates word totals via Spark SQL.
 * @author: tkk fendoukaoziji
 * @create: 2019-04-23 17:51
 **/
public class SparkStreamingDataFrameAndSql {
    /**
     * Entry point: reads whitespace-separated words from a socket stream and, for each
     * 1-second micro-batch, counts word occurrences using Spark SQL over a temp view.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // Raise Spark's log threshold to WARN so the console output stays readable.
        Logger.getLogger("org.apache.spark").setLevel(Level.WARN);
        SparkConf conf = new SparkConf().setAppName("repeatSparkStreaming").setMaster("local[*]");
        JavaStreamingContext ssc = new JavaStreamingContext(conf, Durations.seconds(1));
        ssc.checkpoint("text");
        // Source: raw text lines from host "node3", port 9993; split each line into words.
        JavaReceiverInputDStream<String> lines = ssc.socketTextStream("node3", 9993);
        JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(x.split(" ")).iterator());

        words.foreachRDD((rdd, time) -> {
            // Get the singleton instance of SparkSession (reused across micro-batches).
            SparkSession spark = JavaSparkSessionSingleton.getInstance(rdd.context().getConf());
            // Wrap each word in a WordRow bean so Spark can infer the DataFrame schema.
            JavaRDD<WordRow> javaRowRDD = rdd.map(word -> {
                WordRow record = new WordRow();
                record.setWord(word);
                return record;
            });
            Dataset<Row> dataFrame = spark.createDataFrame(javaRowRDD, WordRow.class);
            dataFrame.createOrReplaceTempView("wordTable");
            Dataset<Row> wordDataFrame =
                    spark.sql("select word,count(*) as total from wordTable group by word");
            wordDataFrame.show();
        });
        ssc.start();
        try {
            ssc.awaitTermination();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing the exception,
            // then stop the streaming context so the JVM can shut down cleanly.
            Thread.currentThread().interrupt();
            ssc.stop();
        }
    }
}
/**
 * Lazily-initialized, process-wide holder for a single {@code SparkSession}, so each
 * micro-batch in {@code foreachRDD} reuses one session instead of rebuilding it.
 */
class JavaSparkSessionSingleton {
    // Note: the original field was marked transient, which is meaningless on a
    // static field and has been dropped.
    private static SparkSession instance = null;

    /** Singleton holder — not instantiable. */
    private JavaSparkSessionSingleton() {
    }

    /**
     * Returns the shared SparkSession, creating it from {@code conf} on first call.
     * Synchronized to make the lazy check-then-act initialization thread-safe.
     *
     * @param conf Spark configuration used only for the initial creation
     * @return the process-wide SparkSession
     */
    public static synchronized SparkSession getInstance(SparkConf conf) {
        if (instance == null) {
            instance = SparkSession
                    .builder()
                    .config(conf)
                    .getOrCreate();
        }
        return instance;
    }
}