package com.spark.mysql

import java.text.SimpleDateFormat
import java.sql.DriverManager
import java.text.SimpleDateFormat
import java.util.Properties
import java.{sql, util}

import org.apache.commons.lang3.StringUtils
import org.apache.spark
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SaveMode, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.control.Breaks._

import com.spark.mysql.insertFunction._;

import scala.collection.mutable.ArrayBuffer

/**
  * @Time:2020-4-17
  * @Author:MoonKnight
  *
  */

object zhuliuanalyse {

  // Shared mutable state: filled in by main() and read by the analysis methods.
  // `conn` and the insertTo* helpers come from the wildcard import of
  // com.spark.mysql.insertFunction above.
  var imsiset: ArrayBuffer[String] = new ArrayBuffer() // all distinct IMSIs found in mobile_data
  var llset: ArrayBuffer[String] = new ArrayBuffer() // all distinct tower coordinates ("ll") in staytime

  // Timestamp layout used by the source tables, e.g. "20181003090000".
  // NOTE(review): SimpleDateFormat is not thread-safe; acceptable here because
  // everything runs single-threaded on the driver.
  val sdf: SimpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
  var statement = conn.createStatement()

  /**
    * Scans each IMSI's de-duplicated location records (table `zhuliudata`)
    * inside [start, end] and records every stay of at least 30 minutes into
    * the `staytime` table via insertToStaytime.
    *
    * A "stay" is the interval between two consecutive location records: the
    * user was at the PREVIOUS record's location until the current record's
    * timestamp. The open interval from the last record up to `end` is counted
    * as well.
    *
    * @param start   inclusive lower bound, formatted yyyyMMddHHmmss
    * @param end     inclusive upper bound, formatted yyyyMMddHHmmss
    * @param typenum period tag stored with each row
    *                (1 = working-day daytime, 2 = evening, 3 = early morning)
    */
  def timeSpandStaytime(start: String, end: String, typenum: Int): Unit = {
    for (x <- imsiset) {
      // NOTE(review): string-built SQL. The IMSIs come from our own database,
      // but a PreparedStatement would be safer if inputs ever become external.
      val sql = "select * from zhuliudata where imsi=\"%1$s\" and time >= \"%2$s\" and time <= \"%3$s\";".format(x, start, end)
      val resultSet = statement.executeQuery(sql)
      // (time, ll, imsi) of the previous record; None before the first row.
      var lastrecord: Option[(String, String, String)] = None

      while (resultSet.next()) {
        val time = resultSet.getString("time")
        val ll = resultSet.getString("ll")
        val imsi = resultSet.getString("imsi")
        val current = (time, ll, imsi)

        lastrecord match {
          case Some((lastTime, lastLl, lastImsi)) =>
            val staytime = (sdf.parse(time).getTime() - sdf.parse(lastTime).getTime()) / 1000
            if (staytime >= 1800) {
              // FIX(review): the stay [lastTime, time] was spent at the
              // PREVIOUS location, so insert lastLl — the original inserted
              // the current ll here, inconsistent with the tail case below.
              insertToStaytime((lastTime, time, lastLl, lastImsi, staytime, typenum))
            }
          case None => // first row for this IMSI: nothing to compare yet
        }
        lastrecord = Some(current)
      }

      // Close the final open interval against the end of the period.
      lastrecord.foreach { case (lastTime, lastLl, lastImsi) =>
        val staytime = (sdf.parse(end).getTime() - sdf.parse(lastTime).getTime()) / 1000
        if (staytime >= 1800) {
          insertToStaytime((lastTime, end, lastLl, lastImsi, staytime, typenum))
        }
      }
    }
  }

  /**
    * For every tower in `llset`, computes the average stay time per user for
    * one period type and writes (ll, avgtime, type) into the `analyse` table.
    *
    * avgtime = total stay seconds at the tower / number of distinct users
    * that had at least one recorded stay there during that period.
    *
    * @param typenum period tag (1 = working-day daytime, 2 = evening, 3 = early morning)
    */
  def TimeSpandAnalyse(typenum: Int): Unit = {
    for (x <- llset) {
      // Count distinct users with at least one stay at this tower/period.
      var sql = "select count(num) as count from (select count(imsi) as num from staytime where type = %1$d and ll = \"%2$s\" group by imsi) t ;".format(typenum, x)
      var resultSet = statement.executeQuery(sql)
      resultSet.next()
      val num = resultSet.getInt("count")

      // Total stay time at this tower for the period. The GROUP BY yields at
      // most one row, and none at all when the tower has no rows of this type
      // — which also guards the division below against num == 0.
      sql = "select ll,sum(staytime) as avgtime from staytime where type = %1$d and ll = \"%2$s\" group by ll".format(typenum, x)
      resultSet = statement.executeQuery(sql)
      while (resultSet.next()) {
        val avgtime = resultSet.getLong("avgtime") / num
        val ll = resultSet.getString("ll")
        insertToAnalyse((ll, avgtime, typenum))
      }
    }
  }

  /**
    * Pipeline entry point:
    *   1. truncate result tables,
    *   2. collect all IMSIs,
    *   3. de-duplicate consecutive same-location records into `zhuliudata`,
    *   4. extract >=30-minute stays per period into `staytime`,
    *   5. compute per-tower average stay times into `analyse`,
    *   6. full-outer-join the three periods into the `final` table via Spark.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder().appName("zhuliuanalyse").master("local[*]").getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "root")

    // Start from clean result tables so re-runs do not accumulate rows.
    // (mobile_data, the raw input table, is left untouched.)
    for (table <- Seq("zhuliudata", "staytime", "analyse", "final")) {
      val clearStatement = conn.prepareCall("truncate table " + table)
      clearStatement.execute()
      clearStatement.close()
    }

    // Collect every distinct IMSI (GROUP BY is equivalent to DISTINCT here).
    var sql = "select imsi from mobile_data group by imsi;"
    var resultSet = statement.executeQuery(sql)
    while (resultSet.next()) {
      imsiset += resultSet.getString("imsi")
    }

    // De-duplicate consecutive records at the same location: for each IMSI,
    // copy a record into zhuliudata only when its location differs from the
    // previous record's, so each stored row marks an arrival at a new tower.
    for (x <- imsiset) {
      sql = "select * from mobile_data where imsi=\"%1$s\";".format(x)
      resultSet = statement.executeQuery(sql)
      var lastrecord: Option[(String, String, String)] = None

      while (resultSet.next()) {
        val time = resultSet.getString("date")
        val longitude = resultSet.getFloat("longitude").toString
        val latitude = resultSet.getFloat("latitude").toString
        val imsi = resultSet.getString("imsi")
        val record = (time, longitude + '_' + latitude, imsi)
        // Insert the very first record, plus any record whose location changed
        // (forall on None is true, so the first record always passes).
        if (lastrecord.forall(prev => !prev._2.equals(record._2))) {
          insertTozhuliu(record)
        }
        lastrecord = Some(record)
      }
    }

    // Stay-time extraction for the three periods of 2018-10-03.
    timeSpandStaytime("20181003090000", "20181003180000", 1) // working-day daytime
    timeSpandStaytime("20181003000000", "20181003070000", 3) // early morning
    timeSpandStaytime("20181003180000", "20181004000000", 2) // evening

    // Collect every tower coordinate ("ll") that appears in staytime.
    sql = "select ll from staytime group by ll;"
    resultSet = statement.executeQuery(sql)
    while (resultSet.next()) {
      llset += resultSet.getString("ll")
    }

    // Per-tower average stay time for each period.
    TimeSpandAnalyse(1) // working-day daytime
    TimeSpandAnalyse(2) // evening
    TimeSpandAnalyse(3) // early morning

    // Merge the three period results into one row per tower and persist.
    val jdbcUrl = "jdbc:mysql://localhost:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false"
    val analyseDF = spark.read.jdbc(jdbcUrl, "analyse", properties)

    val dayDF = analyseDF.filter("type = 1 ").select("ll", "avgtime")
      .withColumnRenamed("avgtime", "daytime")
    dayDF.show()
    val nightDF = analyseDF.filter("type = 2 ").select("ll", "avgtime")
      .withColumnRenamed("avgtime", "nighttime")
    nightDF.show()
    val midnightDF = analyseDF.filter("type = 3 ").select("ll", "avgtime")
      .withColumnRenamed("avgtime", "midnighttime")
    midnightDF.show()

    // Full outer join across the three periods, NA -> 0 for towers missing a
    // period, then append into the `final` table.
    val result = dayDF
      .join(nightDF, Seq("ll"), "full_outer")
      .join(midnightDF, Seq("ll"), "full_outer")
      .na.fill(0)
    result.write.mode(SaveMode.Append).jdbc(jdbcUrl, "final", properties)

    // Release resources now that the batch run is finished.
    statement.close()
    conn.close()
    spark.stop()
  }
}
