package com.zhao.sparksql

import java.text.SimpleDateFormat
import java.util.Properties


import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.mutable.ArrayBuffer

/**
 * Description: <br/>
 * Copyright (c) 2020, 赵 <br/>
 * This program is protected by copyright laws. <br/>
 * Date： 2020/12/28 21:10
 *
 * @author 柒柒
 * @version : 1.0
 */

/**
 * One row of the daily Sensors event extract, mirroring the columns of the
 * MySQL target table `t_sensors_events`. All fields are kept as `String`
 * because they come straight out of a Spark SQL projection; `distinct_id`
 * is converted to `Long` only at JDBC-write time.
 *
 * `final`: case classes should not be extended (equals/hashCode/copy would
 * break for subclasses).
 */
final case class Result(stat_date: String,distinct_id: String,times: String,app_version: String,manufacturer: String,model: String,os: String,
                        os_version: String,service_name: String, province: String, city: String, area: String, toon_type: String, event: String,
                        ip: String)

/**
 * Daily batch job: reads yesterday's rows from the Hive ODS table
 * `ods.t_sensors_events` and overwrites the matching day in the MySQL table
 * `t_sensors_events` (delete-then-insert, batched JDBC writes).
 *
 * Expected args:
 *   1. mysql username
 *   2. mysql password
 *   3. jdbc url
 *   4. yesterday, formatted yyyyMMdd (the Hive partition key)
 */
object SensorsEventRes{
  def main(args: Array[String]): Unit = {
    if (args.length < 4) {
      println(
        """
          |请输入正确的参数：
          |1. mysqlusername
          |2. mysqlpwd
          |3. jdbcurl
          |4. yesterday
          """.stripMargin)
      sys.exit(1)
    }

    val (username, mysqlpwd, jdbcurl, yesterday) = (args(0), args(1), args(2), args(3))
    // Re-format yyyyMMdd (Hive partition value) -> yyyy-MM-dd (MySQL stat_date).
    val date = new SimpleDateFormat("yyyyMMdd").parse(yesterday)
    val yesterday1 = new SimpleDateFormat("yyyy-MM-dd").format(date)

    Logger.getLogger("org").setLevel(Level.WARN)

    val conf = new SparkConf().setAppName(this.getClass.getSimpleName)
    //      .setMaster("local[*]")

    val spark = SparkSession
      .builder()
      .config(conf)
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._

    // length(distinct_id) < 15 filters out anonymous/device ids; the remaining
    // ids are expected to be numeric (see the setLong note below).
    val resultRDD: RDD[Result] = spark.sql(
      s"""
         |select
         |'${yesterday1}' AS stat_date, distinct_id, times, p_app_version as app_version, p_manufacturer as manufacturer, p_model as model, p_os as os, p_os_version as os_version, service_name, province, city, area, toon_type, event, ip
         |FROM ods.t_sensors_events
         |WHERE log_day='${yesterday}'
         |        AND length(distinct_id) <15
         |""".stripMargin).as[Result].rdd.coalesce(5)

    // BUG FIX: the delete used to run inside foreachPartition, so each of the
    // 5 partitions re-deleted the whole day — wiping rows that earlier
    // partitions had already inserted. Run it exactly once, on the driver,
    // before any executor starts writing.
    val driverPool = new MySqlPool(username, mysqlpwd, jdbcurl)
    val delConn = driverPool.getConn()
    val delPs = delConn.prepareStatement(
      s"""
         |delete from t_sensors_events
         |where stat_date='$yesterday1'
         |""".stripMargin)
    try delPs.execute()
    finally driverPool.returnConn(delConn, delPs) // BUG FIX: was never returned/closed

    // Write to MySQL, one connection per partition, batches of 1000 rows.
    resultRDD.foreachPartition(iter => {
      val pool = new MySqlPool(username, mysqlpwd, jdbcurl)
      val connection = pool.getConn()
      val ps = connection.prepareStatement(
        """
          |insert into t_sensors_events
          |(stat_date,distinct_id,times,app_version,manufacturer,model,os,os_version,service_name,province,city,area,toon_type,event,ip)
          |values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
          |""".stripMargin)
      try {
        var i = 0
        while (iter.hasNext) {
          val result = iter.next()
          ps.setString(1, result.stat_date)
          // NOTE(review): assumes distinct_id is always numeric — the Hive
          // query only bounds its length, so a non-numeric id would throw
          // NumberFormatException here. Confirm against the source data.
          ps.setLong(2, result.distinct_id.toLong)
          ps.setString(3, result.times)
          ps.setString(4, result.app_version)
          ps.setString(5, result.manufacturer)
          ps.setString(6, result.model)
          ps.setString(7, result.os)
          ps.setString(8, result.os_version)
          ps.setString(9, result.service_name)
          ps.setString(10, result.province)
          ps.setString(11, result.city)
          ps.setString(12, result.area)
          ps.setString(13, result.toon_type)
          ps.setString(14, result.event)
          ps.setString(15, result.ip)
          ps.addBatch()
          i += 1
          if (i == 1000) {
            ps.executeBatch()
            i = 0
          }
        }
        // Flush the trailing partial batch.
        if (i != 0) ps.executeBatch()
      } finally {
        // BUG FIX: without try/finally, any JDBC failure mid-batch leaked
        // the pooled connection and statement.
        pool.returnConn(connection, ps)
      }
    })

  }
}
