package com.spark.mysql

import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}

import com.spark.mysql.insertfunction._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer

/**
  * @time 2020.5.4
  * @author hjn
  * Combines GetMovePoint and AbnormalAggregation: aggregates per-location
  * crowd counts from mobile-signalling data and classifies travel modes.
  */
object DataProcess {

  // Counters used by the bus-vs-metro judging pass in main().
  // NOTE(review): these are mutated inside an RDD foreachPartition closure,
  // which is only reliable in local mode — on a cluster each executor would
  // update its own copy. Spark accumulators would be the safe replacement.
  var judge: Int = 0
  var judge_bus: Int = 0
  var judge_metro: Int = 0

  // One signalling fix: user id, position, and timestamp ("yyyyMMddHHmmss").
  case class MobileData(imsi: String, longitude: Double, latitude: Double, time: String)

  /**
    * lastTS       timestamp of the last row currently in the database
    * endTS        timestamp at which the processing loop terminates
    * lateCheckTS  upper bound of the filter window; filtering starts from here
    * earlyCheckTS lower bound of the filter window
    */
  var lastTS = "20181003000000"
  val endTS = "20181004000000"
  var lateCheckTS = "20181003000500"
  var earlyCheckTS = ""

  private val EARTH_RADIUS: Double = 6378.137 // Earth radius in kilometres

  /** Converts an angle in degrees to radians. */
  private def rad(degrees: Double): Double = degrees * Math.PI / 180.0

  /**
    * Great-circle distance between two latitude/longitude points (haversine
    * formula), in metres.
    *
    * @param lat1 latitude of the first point, in degrees
    * @param lng1 longitude of the first point, in degrees
    * @param lat2 latitude of the second point, in degrees
    * @param lng2 longitude of the second point, in degrees
    * @return distance in metres, rounded to 4 decimal places of a kilometre (0.1 m)
    */
  def getDistance(lat1: Double, lng1: Double, lat2: Double, lng2: Double): Double = {
    val radLat1: Double = rad(lat1)
    val radLat2: Double = rad(lat2)
    val a: Double = radLat1 - radLat2 // latitude delta (radians)
    val b: Double = rad(lng1) - rad(lng2) // longitude delta (radians)
    // Haversine: central angle between the two points.
    var s: Double = 2 * Math.asin(Math.sqrt(Math.pow(Math.sin(a / 2), 2) + Math.cos(radLat1) * Math.cos(radLat2) * Math.pow(Math.sin(b / 2), 2)))
    s = s * EARTH_RADIUS // arc length in kilometres
    // BUG FIX: the original `s * 10000d.round / 10000d` parsed as
    // `s * (10000d.round) / 10000d` (i.e. s * 10000 / 10000), a no-op.
    // The intent was to round the kilometre value to 4 decimal places:
    s = (s * 10000d).round / 10000d
    s = s * 1000 // kilometres -> metres
    s
  }

  /**
    * Perpendicular distance (metres) from track point 2 to the straight line
    * through track points 1 and 3, computed via Heron's formula:
    * area = base * height / 2, so height = 2 * area / base.
    *
    * @param x1 latitude of point 1
    * @param y1 longitude of point 1
    * @param x2 latitude of point 2 (the point being measured)
    * @param y2 longitude of point 2
    * @param x3 latitude of point 3
    * @param y3 longitude of point 3
    * @return distance in metres; when points 1 and 3 coincide (no line is
    *         defined) the distance from point 2 to that single point is
    *         returned instead of the NaN the original 0/0 produced
    */
  def judgeDistance(x1: Double, y1: Double, x2: Double, y2: Double, x3: Double, y3: Double): Double = {
    val a_1_2 = getDistance(x1, y1, x2, y2) // side 1-2
    val b_2_3 = getDistance(x2, y2, x3, y3) // side 2-3
    val c_1_3 = getDistance(x1, y1, x3, y3) // base 1-3
    if (c_1_3 == 0.0) {
      // Degenerate base: points 1 and 3 coincide. The original code divided
      // by zero here (0/0 => NaN); fall back to point-to-point distance.
      a_1_2
    } else {
      val p = (a_1_2 + b_2_3 + c_1_3) / 2 // semi-perimeter
      // Clamp tiny negative radicands caused by floating-point noise on
      // (near-)collinear points, which would otherwise make sqrt return NaN.
      val radicand = Math.max(0.0, p * (p - a_1_2) * (p - b_2_3) * (p - c_1_3))
      val s = Math.sqrt(radicand) // Heron's formula: triangle area
      2 * s / c_1_3 // height of the triangle over base 1-3
    }
  }

  /**
    * Entry point. Slides a 5-minute window over HBase signalling data and, per
    * window: stages rows into MySQL, aggregates per-location user counts,
    * computes an abnormal-aggregation rate against historical averages, and
    * classifies each user's travel mode by segment speed
    * (codes: 0=bus, 1=car, 2=walking, 3=metro, 4=cycling).
    *
    * NOTE(review): `conn` and the selectByTime/insertTo* helpers are not
    * defined in this file — presumably supplied by
    * com.spark.mysql.insertfunction._; confirm `conn` is a live JDBC connection.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("test").setMaster("local[*]")
    //    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")
    val spark = new SQLContext(sc)
    import spark.implicits._ // enables implicit RDD -> DataFrame conversion (.toDF)

    /**
      * Schema for move_type rows: (time, imsi, travel-mode code).
      */
    val schema = StructType(
      List(
        StructField("time", StringType, true),
        StructField("imsi", StringType, true),
        StructField("type", IntegerType, true)
      ))


    // Schema for move_analysis rows: (time, travel-mode code, user count).
    val schema_move_analysis = StructType(
      List(
        StructField("time", StringType, true),
        StructField("type", IntegerType, true),
        StructField("count", IntegerType, true)
      ))

    /**
      * JDBC connection parameters for the MySQL sink.
      */
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "root")
    properties.put("driver", "com.mysql.jdbc.Driver") // JDBC driver class is com.mysql.jdbc.Driver

    /**
      * Static reference data ("longitude,latitude,mode" lines) used to decide
      * between bus and metro for a trajectory segment.
      */
    var static_data = sc.textFile("file:///home/hadoop/Desktop/test_scala/data/static.csv")
    try {

      var clearStatement = conn.prepareCall("truncate table move_history")
      clearStatement.execute()

      clearStatement = conn.prepareCall("truncate table move_analysis")
      clearStatement.execute()

      // Process windows until the configured end timestamp is reached.
      while (lastTS.compareTo(endTS) < 0) {
        lastTS = lateCheckTS

        /**
          * Derive the window's lower bound: lateCheckTS minus 5 minutes.
          */
        val sdf: SimpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
        var dt = sdf.parse(lateCheckTS)
        var rightNow: Calendar = Calendar.getInstance()
        rightNow.setTime(dt)
        rightNow.add(Calendar.MINUTE, -5)
        earlyCheckTS = sdf.format(rightNow.getTime())

        val result_Iterator = selectByTime(earlyCheckTS.toLong, lateCheckTS.toLong)
        var isCheck: Int = 0

        /**
          * Reset the staging table before inserting this window's rows.
          */
        clearStatement = conn.prepareCall("truncate table tmpcount")
        clearStatement.execute()

        /**
          * isCheck records whether result_Iterator produced any rows; when it
          * did (isCheck = 1) the loop advances to the next time window.
          */
        /**
          * Drain the scan results for the current window.
          */
        val dataPart = ArrayBuffer[String]()
        while (result_Iterator.hasNext) {
          isCheck = 1
          val data = result_Iterator.next()
          val time = Bytes.toString(data.getRow)
          val imsi = Bytes.toString(data.getValue(Bytes.toBytes("family"), Bytes.toBytes("imsi")))
          // NOTE(review): laci is read but never used below.
          val laci = Bytes.toString(data.getValue(Bytes.toBytes("family"), Bytes.toBytes("laci")))
          val longitude = Bytes.toString(data.getValue(Bytes.toBytes("family"), Bytes.toBytes("longitude")))
          val latitude = Bytes.toString(data.getValue(Bytes.toBytes("family"), Bytes.toBytes("latitude")))

          // Would keep 8 decimal places, but the values were already rounded
          // when inserted into HBase, so no further formatting is needed here.
          //          longitude = longitude.toDouble.formatted("%.8f")
          //          latitude = latitude.toDouble.formatted("%.8f")
          val s = (time, longitude + '_' + latitude, imsi)
          insertToTmp(s)

          // Accumulate raw fixes for the travel-mode analysis below.
          val line = imsi + "," + longitude + "," + latitude + "," + time
          dataPart.+=(line)
        }

        clearStatement = conn.prepareCall("truncate table current_count")
        clearStatement.execute()

        /**
          * Deduplicate imsi values (keep each user's latest fix) and count
          * users per location ("ll" = "longitude_latitude").
          */
        val selectsql = " select ll,count(*) as count from (select * from tmpcount where time in(select max(time) from tmpcount group by imsi) ) c group by ll;"
        val statement = conn.createStatement()
        var resultSet = statement.executeQuery(selectsql)
        while (resultSet.next()) {
          val time = lateCheckTS
          val LL = resultSet.getString("ll").split('_')
          val Logtitude = LL(0).toFloat
          val Latitude = LL(1).toFloat
          // NOTE(review): x10 looks like a sampling scale factor — confirm.
          val count = resultSet.getInt("count") * 10
          var s = (time, Logtitude, Latitude, count)
          insertToCount(s)
          insertToHiscount(s)
        }


        /**
          * Join current_count with the compare (historical average) table and
          * insert the abnormal-aggregation rate into abnormal.
          */
        clearStatement = conn.prepareCall("truncate table abnormal")
        clearStatement.execute()
        // Fuzzy join on the time suffix; could be improved.
        val selectAbnormal = "select current_count.time,current_count.longitude,current_count.latitude,count,average from current_count,compare where current_count.time like concat('%',compare.time) and current_count.longitude = compare.longitude and current_count.latitude = compare.latitude"
        resultSet = statement.executeQuery(selectAbnormal)
        while (resultSet.next()) {
          val time = resultSet.getString("time")
          val longitude = resultSet.getDouble("longitude")
          val latitude = resultSet.getDouble("latitude")
          val count = resultSet.getFloat("count")
          val average = resultSet.getFloat("average")
          // NOTE(review): 80 is an unexplained scale factor — confirm its origin.
          val rate = count * 80 / average
          val s = (time, longitude, latitude, count, rate)
          insertToAbnormal(s)
        }

        val dataArray = dataPart.toArray
        val dataRDD = sc.parallelize(dataArray)

        // Build a DataFrame of this window's fixes.
        val dataDF: DataFrame = dataRDD.map(_.split(",")).map(attributes =>
          MobileData(attributes(0), attributes(1).toDouble, attributes(2).toDouble, attributes(3))).toDF()
        dataDF.createOrReplaceTempView("mobile_data") // register as a temp view so it can be queried with SQL

        /**
          * Classify the travel mode of each consecutive pair of fixes, per user.
          */
        val user: Array[Row] = spark.sql("SELECT DISTINCT imsi FROM mobile_data").collect()
        val result = new ArrayBuffer[String]() // collected travel-mode records ("time imsi code")
        if (user.length != 0) {
          for (i <- user) {
            val tempUser = i.getString(0)
            // NOTE(review): imsi is concatenated unquoted into the SQL text;
            // a non-numeric imsi will break this query (and it is
            // injection-prone) — quote or parameterize the value.
            val data = spark.sql("SELECT longitude,latitude,time FROM mobile_data WHERE imsi=" + tempUser).collect()
            val dataIterator = data.iterator

            /**
              * Seed the sliding pair with the user's first fix.
              * NOTE(review): next() without hasNext — assumes at least one row
              * per user (guaranteed here since imsi came from this view).
              */
            var start: Row = dataIterator.next()
            var start_x: Double = start(0).toString.toDouble
            var start_y: Double = start(1).toString.toDouble
            var startTime: String = start(2).toString() // Row columns are typed Any in Scala; convert explicitly
            while (dataIterator.hasNext) {
              // A mode can only be judged once two consecutive fixes exist.
              val end: Row = dataIterator.next()
              val end_x = end(0).toString.toDouble
              val end_y = end(1).toString.toDouble
              val endTime = end(2).toString

              if (start_x == end_x && start_y == end_y) { // same position => walking (code 2)
                println(lateCheckTS + " " + tempUser + " " + "步行")
                println()
                result.+=(lateCheckTS + " " + tempUser + " " + "2")

              } else {
                /**
                  * Elapsed time, approach 1: parse both timestamps and diff.
                  */
                val sT = sdf.parse(startTime)
                val eT = sdf.parse(endTime)
                val tm1 = sT.getTime
                val tm2 = eT.getTime
                // NOTE(review): Long division truncates before widening to Double.
                val needTime: Double = (tm2 - tm1) / (1000) // in seconds

                /**
                  * Elapsed time, approach 2 (DataFrame-based, kept for reference):
                  * scala> val df = Seq(("1/01/2017 12:01:00 AM","1/1/2017 12:05:00 AM")).toDF("time1","time2")
                  * df: org.apache.spark.sql.DataFrame = [time1: string, time2: string]
                  * *
                  * scala> val df2 = df.withColumn("time1",to_timestamp('time1,"d/MM/yyyy hh:mm:ss a")).withColumn("time2",to_timestamp('time2,"d/MM/yyyy hh:mm:ss a"))
                  * df2: org.apache.spark.sql.DataFrame = [time1: timestamp, time2: timestamp]
                  * *
                  * scala> df2.printSchema
                  * root
                  * |-- time1: timestamp (nullable = true)
                  * |-- time2: timestamp (nullable = true)
                  * *
                  * scala> df2.withColumn("diff_sec",unix_timestamp('time2)-unix_timestamp('time1)).withColumn("diff_min",'diff_sec/60).show(false)
                  * time1	time2	diff_sec	diff_min
                  * 2017-01-01 00:01:00	2017-01-01 00:05:00	240	4.0
                  */
                //              val timeDF = Seq(startTime,endTime).toDF("startTime","endTime")
                //              val timeDiff = timeDF.withColumn("startTime",to_timestamp("startTime","yyyyMMddHHmmss"))
                //                .withColumn("endTime",to_timestamp("endTime","yyyyMMddHHmmss"))

                val distance: Double = getDistance(start_x, start_y, end_x, end_y)
                val speed: Double = distance / needTime // metres per second

                /**
                  * Classify the travel mode by segment speed.
                  */
                if (speed < 5) { // walking (code 2)
                  println(lateCheckTS + " " + tempUser + " " + "步行")
                  println()
                  result.+=(lateCheckTS + " " + tempUser + " " + "2")

                } else if (speed >= 5 && speed < 15) { // cycling (code 4)
                  println(lateCheckTS + " " + tempUser + " " + "骑行")
                  println()
                  result.+=(lateCheckTS + " " + tempUser + " " + "4")

                } else if (speed >= 15 && speed < 25) { // bus or metro
                  judge = 0
                  judge_bus = 0
                  judge_metro = 0

                  // NOTE(review): repartition returns a new RDD; the result is
                  // discarded here, so this line has no effect.
                  static_data.repartition(4)
                  // Count transit stops within 500 m of the segment's line.
                  // NOTE(review): mutating object-level counters inside
                  // foreachPartition only works in local mode; on a cluster
                  // each executor updates its own copy — use accumulators.
                  static_data.foreachPartition(lines => {
                    for (line <- lines) {
                      val fields = line.split(",")
                      val longitude = fields(0)
                      val latitude = fields(1)
                      val mode = fields(2)
                      val judge_distance: Double = judgeDistance(start_x, start_y, longitude.toDouble, latitude.toDouble, end_x, end_y)
                      if (judge_distance <= 500) {
                        judge += 1
                        if (mode.equals("地铁")) {
                          judge_metro += 1
                        } else {
                          judge_bus += 1
                        }
                      }
                    }
                  })

                  // At least 5 nearby transit points => public transport.
                  if (judge >= 5) {
                    if (judge_bus > judge_metro) { // bus (code 0)
                      println(lateCheckTS + " " + tempUser + " " + "公交")
                      println()
                      result.+=(lateCheckTS + " " + tempUser + " " + "0")
                    } else {
                      // metro (code 3)
                      println(lateCheckTS + " " + tempUser + " " + "地铁")
                      println()
                      result.+=(lateCheckTS + " " + tempUser + " " + "3")
                    }
                  } else { // driving (code 1)
                    println(lateCheckTS + " " + tempUser + " " + "驾车")
                    println()
                    result.+=(lateCheckTS + " " + tempUser + " " + "1")
                  }

                } else if (speed >= 25) { // driving (code 1)
                  println(lateCheckTS + " " + tempUser + " " + "驾车")
                  println()
                  result.+=(lateCheckTS + " " + tempUser + " " + "1")

                }
              }


              /**
                * Slide the pair forward for the next iteration.
                */
              start = end
              start_x = end_x
              start_y = end_y
              startTime = endTime
            }
          }

          /**
            * Persist the travel-mode records to MySQL.
            */
          clearStatement = conn.prepareCall("truncate table move_type")
          clearStatement.execute()

          val resultArray = result.toArray
          val resultRDD = sc.parallelize(resultArray).map(_.split(" "))
          val rowRDD = resultRDD.map(p => Row(p(0), p(1), p(2).toInt))
          val rowDF = spark.createDataFrame(rowRDD, schema)
          rowDF.write.mode(SaveMode.Append).jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "move_type", properties)
          rowDF.write.mode(SaveMode.Append).jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "move_history", properties)


          /**
            * Aggregate move_type by mode and write totals to move_analysis.
            */
          val moveData: Array[Row] = spark.read.jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "(SELECT DISTINCT time FROM move_type) T", properties).collect()
          var move_analysis = new ArrayBuffer[String]()
          for (i <- moveData) {
            val move_time = i.toString().drop(1).dropRight(1) // strip Row's surrounding brackets to get the raw time value
            print(move_time)
            val bus = spark.read.jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "(SELECT imsi FROM move_type where time = " + move_time + " and type=0 ) T", properties).count
            val car = spark.read.jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "(SELECT imsi FROM move_type where time = " + move_time + " and type = 1 ) T", properties).count
            val walking = spark.read.jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "(SELECT imsi FROM move_type where time = " + move_time + " and type = 2 ) T", properties).count
            val subway = spark.read.jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "(SELECT imsi FROM move_type where time = " + move_time + " and type = 3 ) T", properties).count
            val biking = spark.read.jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "(SELECT imsi FROM move_type where time = " + move_time + " and type = 4 ) T", properties).count

            // One record per mode: 0=bus, 1=car, 2=walking, 3=subway, 4=biking.
            val move_analysisRDD = spark.sparkContext.parallelize(Array(
              move_time + " 0 " + bus.toString,
              move_time + " 1 " + car.toString,
              move_time + " 2 " + walking.toString,
              move_time + " 3 " + subway.toString,
              move_time + " 4 " + biking.toString)
            ).map(_.split(" "))

            val move_analysisRDD2 = move_analysisRDD.map(p => Row(p(0), p(1).toInt, p(2).toInt))
            val move_analysisDF = spark.createDataFrame(move_analysisRDD2, schema_move_analysis)
            move_analysisDF.show()
            move_analysisDF.write.mode(SaveMode.Append).jdbc("jdbc:mysql://localhost:3306/test?useSSL=false", "move_analysis", properties)

          }
        }

        // rightNow still holds earlyCheckTS (lateCheckTS - 5 min):
        //  - non-empty batch: -5 + 10 => advance the window end by 5 minutes;
        //  - empty batch: -5 + 5 => recompute the same lateCheckTS and retry.
        if (isCheck == 1) {
          rightNow.add(Calendar.MINUTE, 10)
          lateCheckTS = sdf.format(rightNow.getTime())
        } else {
          rightNow.add(Calendar.MINUTE, 5)
          lateCheckTS = sdf.format(rightNow.getTime())
        }

      }
    } catch {
      // NOTE(review): rollback is only meaningful if auto-commit was disabled
      // on conn — confirm in the insertfunction package.
      case e: Exception => e.printStackTrace()
        conn.rollback()
    } finally {
      conn.close()
    }
  }
}
