package com.teamF.bridge.action

import java.sql.{Connection, DriverManager, PreparedStatement}

import com.teamF.bridge.util.{ConnectionPool, FormatTimeEight}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.ArrayBuffer

object DataCollect {

  /**
   * Streaming job that filters/aggregates bridge-sensor records from the
   * "bridgedata" Kafka topic and writes one aggregated row per paraid into
   * the MySQL `sensor_data` table, once per 1-second micro-batch.
   *
   * Expected record format (space-separated), based on the indices used below:
   *   date time ... paraid para_type_code para_value
   * (fields 2 and 3 are ignored — presumably metadata; verify against producer)
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("DataCollect").setMaster("spark://ycspark-1:7077")
    val ssc = new StreamingContext(sparkConf, Seconds(1))
    val topics = Array("bridgedata")
    val kafkaParams = Map(
      "bootstrap.servers" -> "10.2.2.80:9092,10.2.2.105:9092,10.2.2.114:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "bridgedemo",
      "auto.offset.reset" -> "latest",
      // Offsets are not auto-committed; note no manual commit happens either,
      // so on restart the job resumes from "latest" (at-most-once delivery).
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    // NOTE(review): PreferBrokers is only correct when executors are co-located
    // with the Kafka brokers; otherwise PreferConsistent is recommended — confirm.
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferBrokers,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams))

    // The value of each Kafka record is the raw sensor line; keys are unused.
    val value = stream.map(_.value())

    // Each RDD holds the records produced during one 1-second batch.
    // foreachRDD runs on the driver, so collecting and doing JDBC here is safe
    // (but assumes a batch fits in driver memory).
    value.foreachRDD { rdd =>
      // Materialize the batch ONCE. The original called rdd.count followed by
      // rdd.collect, which evaluated the RDD (and re-read Kafka) twice.
      val lines = rdd.collect()
      if (lines.nonEmpty) {
        // collect_time is shared by all records of the same second; the last
        // parsed value wins, matching the original behavior.
        var collectTime = ""
        val srcList = new ArrayBuffer[BridgeSensor]()

        for (line <- lines) {
          val fields: Array[String] = line.split(" ")
          // Drop the fractional seconds, shift the hour by +8 (timezone fix),
          // then re-append a zero microsecond part for the DATETIME column.
          collectTime =
            FormatTimeEight.formatTimeEight(fields(0) + " " + fields(1).split("\\.")(0)) + ".000000"
          srcList.append(new BridgeSensor(fields(4), fields(5), fields(6).toDouble))
        }

        // Group/aggregate the raw readings by paraid (one output row per sensor).
        val finalList = BridgeSensor.getFinalList(srcList)

        // system_time mirrors collect_time, as in the original implementation.
        val systemTime = collectTime

        var conn: Connection = null
        var ps: PreparedStatement = null
        try {
          conn = ConnectionPool.getConnection
          if (conn == null) {
            println("Got a null connection from the pool; creating a new one")
            conn = DriverManager.getConnection("jdbc:mysql://112.74.40.18:3306/bridge?useUnicode=true&characterEncoding=utf8&useServerPrepStmts=false&rewriteBatchedStatements=true", "root", "123456")
          }
          // Manual transaction: all rows of a batch commit atomically.
          conn.setAutoCommit(false)
          val sql = "insert into sensor_data(collect_time,paraid,para_type_code,para_value,system_time) values(?,?,?,?,?)"
          ps = conn.prepareStatement(sql)
          for (bs <- finalList) {
            ps.setString(1, collectTime)
            ps.setString(2, bs.paraid)
            ps.setString(3, bs.para_type_code)
            ps.setDouble(4, bs.para_value)
            ps.setString(5, systemTime)
            ps.addBatch()
          }
          ps.executeBatch()
          conn.commit()
        } catch {
          case e: Exception =>
            // Roll back the partial batch and keep the streaming job alive.
            // The original had no handler here: a single SQL failure killed the
            // job AND leaked the pooled connection.
            println(e)
            if (conn != null) {
              try conn.rollback() catch { case re: Exception => println(re) }
            }
        } finally {
          // Always release JDBC resources, success or failure.
          if (ps != null) {
            try ps.close() catch { case ce: Exception => println(ce) }
          }
          if (conn != null) ConnectionPool.returnConnection(conn)
        }
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
