package com.shujia.spark.stream

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.text.SimpleDateFormat
import java.util.Date

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object Demo6Card {
  def main(args: Array[String]): Unit = {

    /**
      * Count car traffic per toll gate ("card") over a sliding window and
      * persist each window's result to MySQL.
      *
      * NOTE(review): the original comment said "last 5 minutes, every 1 minute",
      * but the window below is 15s with a 5s slide — presumably shortened for
      * testing; confirm the intended production window.
      */
    val conf = new SparkConf()
    // Uncomment for local testing; keep commented when submitting to the cluster.
    //conf.setMaster("local[2]")
    conf.setAppName("card")

    val sc = new SparkContext(conf)

    // 5-second micro-batch interval.
    val ssc = new StreamingContext(sc, Durations.seconds(5))

    // Checkpointing is mandatory for the inverse-function form of
    // reduceByKeyAndWindow. Use an HDFS path when running on the cluster.
    ssc.checkpoint("/data/checkpoint")


    // 1. Read raw JSON car-passing records from the socket source.
    val cardDS: ReceiverInputDStream[String] = ssc.socketTextStream("master", 8888)


    // 2. Parse each JSON record with FastJson and extract the gate id,
    //    pairing it with a count of 1.
    val kvDS: DStream[(Long, Int)] = cardDS.map(car => {
      val jsonObj: JSONObject = JSON.parseObject(car)
      val card: Long = jsonObj.getLong("card")
      (card, 1)
    })

    // 3. Sliding-window count per gate: window 15s, slide 5s. The inverse
    //    function (subtraction) makes the window incremental — only entering
    //    and leaving batches are combined, instead of re-reducing the window.
    val cardFlowDS: DStream[(Long, Int)] = kvDS.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y,
      (i: Int, j: Int) => i - j,
      Durations.seconds(15),
      Durations.seconds(5)
    )

    // 4. Persist each window's result to MySQL.
    cardFlowDS.foreachRDD(rdd => {
      // foreachPartition: one JDBC connection per partition, not per record.
      rdd.foreachPartition(iter => {
        // 1. Load the driver (a no-op on modern JDBC drivers, kept for
        //    compatibility with older driver jars).
        Class.forName("com.mysql.jdbc.Driver")
        // 2. Open one connection for the whole partition.
        val con: Connection = DriverManager.getConnection("jdbc:mysql://master:3306/bigdata", "root", "123456")
        try {
          // 3. Prepare the insert statement once per partition.
          val stat: PreparedStatement = con.prepareStatement("insert into card_flow(card,dateStr,flow) values(?,?,?)")
          try {
            // BUG FIX: "hh" is the 12-hour clock (01-12); "HH" is the 24-hour
            // clock. With "hh", afternoon timestamps were stored incorrectly.
            // Also hoisted out of the per-record loop: SimpleDateFormat is
            // costly to construct (and not thread-safe, but a partition
            // iterator is consumed by a single thread, so reuse here is safe).
            val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

            // Iterator#foreach runs locally on the executor — it is a plain
            // Scala call, not an RDD operator.
            iter.foreach {
              case (card: Long, flow: Int) =>
                // Timestamp of this window's computation.
                val dateStr: String = format.format(new Date())

                // 4. Bind parameters for this row.
                stat.setLong(1, card)
                stat.setString(2, dateStr)
                stat.setInt(3, flow)

                // 5. Execute the insert.
                stat.execute()
            }
          } finally {
            stat.close()
          }
        } finally {
          // Always release the connection — even if an insert throws —
          // otherwise each failing batch leaks one connection per partition.
          con.close()
        }
      })

    })

    ssc.start()
    ssc.awaitTermination()
    // Only reached after the streaming context terminates.
    ssc.stop()


    /**
      * Submit:
      * spark-submit --master yarn-client --class com.shujia.spark.stream.Demo6Card --num-executors 2 spark-1.0-jar-with-dependencies.jar
      *
      * Kill:
      * yarn application -kill application_1652518534090_0028
      */

  }

}
