package com.atguigu.bigdata.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.sql.{DataFrame, SparkSession}
/** Immutable record for one parsed "name age" input line. */
final case class People(name: String, age: Int)

object SparkStreamingStudy_rddtosql1 {

  /** Entry point: reads "name age" lines from a TCP socket every 10 seconds,
    * converts each micro-batch RDD to a DataFrame, registers it as a temp
    * view, and prints the result of a Spark SQL query over it.
    */
  def main(args: Array[String]): Unit = {
    // 1. Initialize the Spark configuration.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("StreamWordCount")
    // 2. Initialize the StreamingContext with a 10-second batch interval.
    val ssc = new StreamingContext(sparkConf, Seconds(10))

    // 3. Create a DStream from the socket; each element is one line of text.
    val lineStreams = ssc.socketTextStream("10.21.13.181", 9999)
    // SparkSession is required for DataFrame / Spark SQL support.
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    // Needed for the rdd.toDF implicit conversion.
    import spark.implicits._

    // foreachRDD exposes each micro-batch as a plain RDD.
    lineStreams.foreachRDD { rdd =>
      // Parse "name age" records. Malformed lines (too few fields, or a
      // non-numeric age) are dropped via Try instead of throwing, so one
      // bad input line cannot fail the whole streaming batch.
      val peopleRDD = rdd.flatMap { line =>
        val fields = line.split(" ")
        if (fields.length >= 2)
          scala.util.Try(People(fields(0), fields(1).trim.toInt)).toOption
        else
          None
      }
      val peopleDF = peopleRDD.toDF()
      peopleDF.createOrReplaceTempView("people")
      val resultDF = spark.sql("select name,age from people")
      resultDF.show()
    }
    ssc.start()
    ssc.awaitTermination()
  }
  // NOTE(review): removed a duplicate nested `case class People(name, age)`
  // that shadowed the identical top-level definition within this object.
}

