package com.shengzai.stream

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext, util}

import java.sql.{Driver, DriverManager}
import java.text.SimpleDateFormat
import java.util.Date







object Demo6CarStream {

  /**
   * Spark Streaming demo: reads JSON car records ({"road_id": ..., "speed": ...})
   * from a socket, and every 10 seconds computes the average speed per road over
   * the last 20 seconds.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()

    // local[2] minimum: the socket receiver permanently occupies one thread,
    // so at least one more is needed to actually process batches.
    conf.setMaster("local[2]")

    conf.setAppName("car")

    val sc = new SparkContext(conf)

    // 5-second micro-batch interval.
    val ssc = new StreamingContext(sc, Durations.seconds(5))

    // Checkpointing is mandatory for window operations such as reduceByKeyAndWindow.
    ssc.checkpoint("data/checkpoint")

    val RD: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.146.136", 8888)

    // Driver-side counter of total records seen across all batches.
    val acc: util.LongAccumulator = sc.longAccumulator

    val resRDD: DStream[(String, String, Int, Double)] = RD.map(
      line => {
        acc.add(1)
        val jsonLine: JSONObject = JSON.parseObject(line)
        val id: String = jsonLine.getString("road_id")
        // NOTE(review): getDouble returns null (NPE on unboxing) if the "speed"
        // field is missing — assumes well-formed input; confirm with the producer.
        val speed: Double = jsonLine.getDouble("speed")
        // Pair each road with (speed, 1) so sum and count can be reduced together.
        (id, (speed, 1))
      }
    ).reduceByKeyAndWindow(
      (x, y) => (x._1 + y._1, x._2 + y._2),
      windowDuration = Durations.seconds(20),
      slideDuration = Durations.seconds(10)
    ).map {
      case (id: String, (speed: Double, count: Int)) =>
        val date = new Date()
        // Created per record on purpose: SimpleDateFormat is not thread-safe,
        // so it must not be shared across executor threads.
        val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        val time: String = format.format(date)
        val avgSpeed: Double = speed / count
        (id, time, count, avgSpeed)
    }

    // BUG FIX: a DStream pipeline needs at least one output operation, otherwise
    // ssc.start() fails with "No output operations registered, so nothing to
    // execute". print() materializes the window results each slide interval.
    resRDD.print()

    ssc.start()
    // Blocks until the context is stopped (or the computation fails).
    ssc.awaitTermination()
    ssc.stop()

  }

}
