package com.demo.spark;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import scala.Tuple2;

public class SparkDemo {

	// Demo cluster endpoints. Kept as the original hard-coded values so the
	// no-arg entry points behave exactly as before.
	private static final String DEFAULT_MASTER = "spark://192.168.6.131:7077";
	private static final String DEFAULT_SOCKET_HOST = "192.168.6.131";
	private static final int DEFAULT_SOCKET_PORT = 9999;
	private static final String DEFAULT_JSON_PATH = "/test.json";

	/**
	 * Runs the network word-count streaming demo against the default demo
	 * cluster and socket source. Blocks until the streaming context terminates.
	 *
	 * @throws InterruptedException if the calling thread is interrupted while
	 *         waiting for the streaming context to terminate
	 */
	public static void demoSparkStream() throws InterruptedException {
		demoSparkStream(DEFAULT_MASTER, DEFAULT_SOCKET_HOST, DEFAULT_SOCKET_PORT);
	}

	/**
	 * Runs a word-count over lines received from a TCP socket, printing the
	 * per-batch counts every second. Blocks until termination.
	 *
	 * @param master Spark master URL (e.g. {@code spark://host:7077})
	 * @param host   hostname of the text socket source
	 * @param port   port of the text socket source
	 * @throws InterruptedException if interrupted while awaiting termination
	 */
	public static void demoSparkStream(String master, String host, int port) throws InterruptedException {
		SparkConf conf = new SparkConf()
				.setAppName("NetworkWordCount")
				.setMaster(master);
		// 1-second micro-batches, as in the original demo.
		JavaStreamingContext jsc = new JavaStreamingContext(conf, Durations.seconds(1));
		try {
			JavaReceiverInputDStream<String> lines = jsc.socketTextStream(host, port);
			// Split each line on single spaces into individual words.
			JavaDStream<String> words = lines.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
			JavaPairDStream<String, Integer> wordToPair = words.mapToPair(s -> new Tuple2<>(s, 1));
			JavaPairDStream<String, Integer> wordCounts = wordToPair.reduceByKey(Integer::sum);
			wordCounts.print();
			jsc.start();
			jsc.awaitTermination();
		} finally {
			// Release the receiver socket and streaming resources; the original
			// suppressed the "resource" warning instead of closing the context.
			jsc.close();
		}
	}

	/**
	 * SparkSQL demo: reads a JSON file into a {@link Dataset} and prints it.
	 * The input path may be supplied as the first CLI argument; otherwise the
	 * original default path is used.
	 *
	 * @param args optional: {@code args[0]} is the JSON file path
	 */
	public static void main(String[] args) {
		String jsonPath = args.length > 0 ? args[0] : DEFAULT_JSON_PATH;
		SparkSession sparkSession = SparkSession.builder()
				.appName("SparkSQL")
				.master(DEFAULT_MASTER)
				.getOrCreate();
		try {
			Dataset<Row> json = sparkSession.read().json(jsonPath);
			json.show();
		} finally {
			// Shut down the underlying SparkContext; the original leaked it.
			sparkSession.stop();
		}
	}

}
