package Streaming

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext, Time}

/**
 * Counts words received over a TCP socket using Spark Streaming and Spark SQL.
 *
 * Each 2-second micro-batch of '\n'-delimited text (e.g. produced by `nc -lk <port>`)
 * is split into words, converted to a DataFrame, registered as a temp view, and
 * aggregated with a SQL `group by` whose result is printed per batch.
 *
 * Usage: SqlNetworkWordCount [host [port]]
 * Defaults to 192.168.163.8:9999 when no arguments are given, preserving the
 * original hard-coded target.
 */
object SqlNetworkWordCount {

  def main(args: Array[String]): Unit = {
    // Source host/port are now overridable from the command line; the
    // previous hard-coded values remain the defaults for compatibility.
    val host = if (args.length > 0) args(0) else "192.168.163.8"
    val port = if (args.length > 1) args(1).toInt else 9999

    // Create the context with a 2 second batch size.
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("SqlNetworkWordCount")
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")
    val ssc = new StreamingContext(sc, Seconds(2))

    // Create a socket stream on target host:port and count the words in the
    // input stream of '\n'-delimited text (eg. generated by 'nc').
    // Note that no duplication in storage level only for running locally.
    // Replication necessary in distributed scenario for fault tolerance.
    val lines: ReceiverInputDStream[String] =
      ssc.socketTextStream(host, port, StorageLevel.MEMORY_AND_DISK_SER)
    val words: DStream[String] = lines.flatMap(_.split(" "))

    // Convert RDDs of the words DStream to DataFrame and run SQL query.
    words.foreachRDD { (rdd: RDD[String], time: Time) =>
      // Get the singleton instance of SparkSession (reused across batches).
      val spark = SparkSessionSingleton.getInstance(rdd.sparkContext.getConf)
      import spark.implicits._

      // Convert RDD[String] to RDD[case class] to DataFrame.
      val wordsDataFrame: DataFrame = rdd.map(w => Record(w)).toDF()

      // Creates a temporary view using the DataFrame.
      wordsDataFrame.createOrReplaceTempView("words")

      // Do word count on table using SQL and print it.
      val wordCountsDataFrame: DataFrame =
        spark.sql("select word, count(*) as total from words group by word")
      println(s"========= $time =========")
      wordCountsDataFrame.show()
    }

    ssc.start()
    ssc.awaitTermination()
  }

}

/** Row schema for the "words" temp view: one word per row. */
case class Record(word: String)

/**
 * Lazily instantiated singleton SparkSession, shared across micro-batches.
 * Required by `foreachRDD` above; the commented-out original referenced it
 * but the file never defined it.
 */
object SparkSessionSingleton {

  // @transient so the singleton is never captured in a serialized closure.
  @transient private var instance: SparkSession = _

  /** Returns the process-wide SparkSession, creating it on first use. */
  def getInstance(sparkConf: SparkConf): SparkSession = {
    if (instance == null) {
      instance = SparkSession
        .builder
        .config(sparkConf)
        .getOrCreate()
    }
    instance
  }
}




