package com.song.sparkstudy.stream;

import java.io.Serializable;
import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.StorageLevels;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.examples.streaming.JavaRecord;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.functions;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

public class JavaSqlWordCount {

	/**
	 * Streaming word count driven by Spark SQL.
	 *
	 * Reads lines from a socket on localhost:8888 in 20-second micro-batches,
	 * splits them into words, and for each batch registers the words as a temp
	 * table and runs a {@code GROUP BY} count, printing the result to stdout.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {

		SparkConf conf = new SparkConf().setAppName("net work cout").setMaster("local[2]");

		// 20-second batch interval.
		JavaStreamingContext context = new JavaStreamingContext(conf, Durations.seconds(20));

		// Receive raw text lines; spill to disk if executor memory runs short.
		JavaReceiverInputDStream<String> lines =
				context.socketTextStream("localhost", 8888, StorageLevels.MEMORY_AND_DISK);

		// Split each incoming line on single spaces into individual words.
		JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {

			@Override
			public Iterable<String> call(String line) throws Exception {
				return Arrays.asList(line.split(" "));
			}
		});

		// foreachRDD replaces the deprecated DStream.foreach; it takes the same
		// Function<JavaRDD<String>, Void> and runs once per micro-batch on the driver.
		words.foreachRDD(new Function<JavaRDD<String>, Void>() {

			@Override
			public Void call(JavaRDD<String> rdd) throws Exception {

				// Lazily-created singleton so one SQLContext is reused across batches.
				SQLContext sqlContext = JavaSQLContextSingleton.getInstance(rdd.context());

				// Wrap each word in a JavaRecord bean so Spark SQL can infer a schema.
				JavaRDD<JavaRecord> rowRdd = rdd.map(new Function<String, JavaRecord>() {

					@Override
					public JavaRecord call(String word) throws Exception {
						JavaRecord record = new JavaRecord();
						record.setWord(word);
						return record;
					}
				});

				DataFrame wordsDataFrame = sqlContext.createDataFrame(rowRdd, JavaRecord.class);

				// Re-registering each batch overwrites the previous batch's table.
				wordsDataFrame.registerTempTable("words");

				DataFrame wordcountFrame = sqlContext.sql("select word ,count(*) as total from  words group by word");
				System.out.println(" sql result is :----------");
				wordcountFrame.show();
				return null;
			}
		});

		context.start();
		context.awaitTermination();
	}
}


/**
 * Lazily-initialized, process-wide holder for a single {@link SQLContext}.
 *
 * The accessor is synchronized so that concurrent first calls cannot race
 * and construct two contexts. The original {@code transient} modifier was
 * removed: it has no effect on static fields, which are never serialized.
 */
class JavaSQLContextSingleton {

	// Shared instance, created on first request.
	private static SQLContext instance = null;

	/**
	 * Returns the shared {@link SQLContext}, creating it on first use.
	 *
	 * @param context the SparkContext to back a newly created SQLContext
	 * @return the process-wide SQLContext
	 */
	public static synchronized SQLContext getInstance(SparkContext context) {
		if (instance == null) {
			instance = new SQLContext(context);
		}
		return instance;
	}
}

