package com.spark.test

import java.sql
import java.sql.DriverManager
import java.text.SimpleDateFormat

import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by hjn on 2020.3.8
  */

object InsertDataToMySQL {

  /**
    * Writes one partition of joined records into the MySQL table `mobileData`.
    *
    * Each record has the shape `(laci, ((imsi, timestamp), (longitude, latitude)))`.
    * A single connection and a single prepared statement are shared by the whole
    * partition; all rows are committed together and rolled back on failure.
    *
    * Side effects: opens a JDBC connection to localhost MySQL, inserts rows,
    * and prints each inserted row to stdout.
    */
  def func(records: Iterator[(String, ((String, String), (String, String)))]): Unit = {
    import scala.util.control.NonFatal

    var conn: sql.Connection = null
    var stmt: sql.PreparedStatement = null
    // Loop-invariant: build the formatter once, not once per record.
    val sdf: SimpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
    try {
      val url = "jdbc:mysql://localhost:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false"
      val user = "root"
      val password = "root" // database password; change this to your own MySQL password
      conn = DriverManager.getConnection(url, user, password)
      // Disable autocommit so the rollback in the catch block is meaningful
      // (under the JDBC default autocommit, rollback() does nothing useful).
      conn.setAutoCommit(false)
      // Prepare ONCE and reuse for every row. The original prepared a new
      // statement per record inside the loop, leaking all but the last one.
      val insertSql = "insert into mobileData(date, imsi, laci, longitude, latitude) values (?,?,?,?,?)"
      stmt = conn.prepareStatement(insertSql)

      records.foreach { case (laci, ((imsi, timestamp), (longitude, latitude))) =>
        val date: String = sdf.format(timestamp.toLong)

        // Null/empty checks come FIRST so a null imsi cannot NPE in contains().
        val imsiOk = imsi != null && imsi.nonEmpty &&
          !imsi.contains("*") && !imsi.contains("^") && !imsi.contains("#")
        val laciOk = laci != null && laci.nonEmpty

        // Keep only well-formed records from the target day.
        if (imsiOk && laciOk && date.contains("20181003")) {
          stmt.setString(1, date)
          stmt.setString(2, imsi)
          stmt.setString(3, laci)
          stmt.setDouble(4, longitude.toDouble)
          stmt.setDouble(5, latitude.toDouble)
          stmt.executeUpdate()

          printf("insert into mobileData(date, imsi, laci, longitude, latitude) values (%s,%s,%s,%s,%s)", date, imsi, laci, longitude, latitude)
          println()
          println()
        }
      }
      conn.commit()
    } catch {
      // NonFatal keeps OutOfMemoryError / InterruptedException propagating.
      case NonFatal(e) =>
        e.printStackTrace()
        // Guard against NPE: getConnection itself may have thrown.
        if (conn != null) conn.rollback()
    } finally {
      if (stmt != null) {
        stmt.close()
      }
      if (conn != null) {
        conn.close()
      }
    }
  }

  /**
    * Joins per-user cell events with base-station coordinates, orders them by
    * timestamp, and pushes the result into MySQL via [[func]].
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("InsertDataToMySQL").setMaster("local[*]")
    //    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")

    try {
      // 1. Read the input files.
      val user = sc.textFile("file:///home/hadoop/Desktop/test_scala/data/data.csv")
      val base = sc.textFile("file:///home/hadoop/Desktop/test_scala/data/base.csv")

      // data.csv row: timestamp,imsi,lac_id,cell_id  ->  (laci, (imsi, timestamp)).
      // Keyed directly by laci; the original built an ((imsi, laci), timestamp)
      // intermediate and then re-keyed it in a second pass — same join input.
      val userData = user.map { line =>
        val fields = line.split(",")
        val timestamp = fields(0)
        val imsi = fields(1)
        val laci = fields(2) + "-" + fields(3)
        (laci, (imsi, timestamp))
      }

      // base.csv row: x,y,laci  ->  (laci, (x, y)).
      val baseData = base.map { line =>
        val fields = line.split(",")
        (fields(2), (fields(0), fields(1)))
      }

      // (laci, ((imsi, timestamp), (x, y))), ordered by timestamp ascending.
      val joined: RDD[(String, ((String, String), (String, String)))] =
        userData.join(baseData).sortBy(_._2._1._2, ascending = true)

      // One partition so exactly one JDBC connection is opened by func.
      joined.repartition(1).foreachPartition(func)
    } finally {
      // Always release the Spark context, even if the job above fails.
      sc.stop()
    }
  }
}