package com.spark.test

import java.text.SimpleDateFormat
import java.util

import org.apache.commons.lang3.StringUtils
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by hjn on 2020.3.8
  */

object mobileDataTest {

  /**
    * Joins mobile user signaling records with base-station coordinates and
    * publishes the joined records, in timestamp order, to a Kafka topic.
    *
    * Input files (hard-coded local paths):
    *  - data.csv: timestamp,imsi,lac_id,cell_id
    *  - base.csv: x,y,laci          (laci = "lac_id-cell_id")
    *
    * Output Kafka message value: "date,imsi,laci,x,y" where date is the
    * record timestamp formatted as yyyyMMddHHmmss.
    *
    * @param args <metadataBrokerList> <topic> <messagesPerSec>
    *             messagesPerSec is the pause in milliseconds between sends.
    */
  def main(args: Array[String]): Unit = {

    if (args.length < 3) {
      System.err.println("Usage: <metadataBrokerList> <topic> " +
        "<messagesPerSec>")
      System.exit(1)
    }
    val Array(brokers, topic, messagesPerSec) = args

    val conf = new SparkConf().setAppName("mobileDataTest").setMaster("local[*]")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")

    // 1. Read the raw CSV inputs.
    val user = sc.textFile("file:///home/hadoop/Desktop/test_scala/data/data.csv")
    val base = sc.textFile("file:///home/hadoop/Desktop/test_scala/data/base.csv")

    // (laci, (imsi, timestamp)) keyed by the combined "lacId-cellId" id.
    val userByLaci = user.map { line =>
      val fields = line.split(",")
      val timestamp = fields(0)
      val imsi = fields(1)
      val laci = fields(2) + "-" + fields(3)
      (laci, (imsi, timestamp))
    }

    // (laci, (x, y)) — base-station coordinates keyed by the same id.
    val baseByLaci = base.map { line =>
      val fields = line.split(",")
      (fields(2), (fields(0), fields(1)))
    }

    // 2. Join on laci, collapse to one partition so a single producer can
    //    emit all records in global timestamp order.
    val joined: RDD[(String, ((String, String), (String, String)))] =
      userByLaci.join(baseByLaci)
        .repartition(1)
        .sortBy(_._2._1._2, ascending = true)

    joined.foreachPartition { partition =>

      // Kafka producer configuration for this partition.
      val props = new util.HashMap[String, Object]()
      props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
      props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer")
      props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer")
      val producer = new KafkaProducer[String, String](props)

      try {
        // SimpleDateFormat is not thread-safe; one instance per partition.
        val sdf = new SimpleDateFormat("yyyyMMddHHmmss")

        for ((laci, ((imsi, timestamp), (longitude, latitude))) <- partition) {
          val date = sdf.format(timestamp.toLong)

          // Drop records with a missing/garbled IMSI (contains *, ^ or #),
          // a missing laci, or a timestamp outside the target day 2018-10-03.
          val valid =
            StringUtils.isNotEmpty(imsi) &&
              StringUtils.isNotEmpty(laci) &&
              !imsi.contains("*") && !imsi.contains("^") && !imsi.contains("#") &&
              date.contains("20181003")

          if (valid) {
            val str = Array(date, imsi, laci, longitude, latitude).mkString(",")
            print(str)
            producer.send(new ProducerRecord[String, String](topic, null, str))
            Thread.sleep(messagesPerSec.toInt) // throttle: pause messagesPerSec ms
          }
        }
      } finally {
        // Was leaked in the original: close() flushes buffered sends and
        // releases the producer's network resources.
        producer.close()
      }
    }

    sc.stop()
  }
}