package com.gy.spark.sparkstreaming

import org.apache.spark.sql.{DataFrame, SQLContext, SparkSession}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark Streaming + Spark SQL demo.
 *
 * Reads text lines from a socket (localhost:9999) in 5-second micro-batches,
 * converts each batch RDD into a single-column DataFrame, registers it as a
 * temporary view, and runs a SQL group-by word count over the batch.
 *
 * Run `nc -lk 9999` first to provide input.
 */
object StreamingAndSql {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[2]").setAppName("StreamingAndSql")
    val sc = new SparkContext(conf)
    // Micro-batch interval: one RDD every 5 seconds.
    val ssc = new StreamingContext(sc, Durations.seconds(5))

    val socketDStream = ssc.socketTextStream("localhost", 9999)

    socketDStream.foreachRDD { rdd =>
      // Reuse (or lazily create) a SparkSession bound to this RDD's SparkContext.
      // getOrCreate makes this safe to call once per batch.
      val spark = SparkSession.builder()
        .config(rdd.context.getConf)
        .getOrCreate()
      // Import implicit conversions so RDD[String] gains .toDF.
      import spark.implicits._
      // Convert the batch RDD into a DataFrame with one "word" column
      // (each input line is treated as a single word).
      val df: DataFrame = rdd.toDF("word")
      df.show()
      // Register the batch as a temporary view so it can be queried with SQL.
      df.createOrReplaceTempView("tmp")
      // Count occurrences of each word within this batch.
      val resultDf = spark.sql("select t.word,count(*) as g_count from tmp as t group by t.word")

      resultDf.show()
    }

    ssc.start()
    // Blocks until the streaming context is stopped (externally or by error).
    ssc.awaitTermination()
    // Once awaitTermination returns, shut down the SparkContext too.
    // The original passed `false`, which left the SparkContext running
    // after the streaming context terminated.
    ssc.stop(stopSparkContext = true)
  }

}
