package com.zhao.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Description: daily export of Sensors events from Hive to MySQL. <br/>
 * Copyright (c) 2020, Zhao (赵). <br/>
 * This program is protected by copyright laws. <br/>
 * Date: 2020/12/23 11:09
 *
 * @author 柒柒
 * @version 1.0
 */

object sensors_event {

  /**
   * Daily ETL entry point: reads one day's partition of sensor events from
   * the Hive table `ods.t_sensors_events` and appends the selected columns
   * to the MySQL table `t_sensors_events` via JDBC.
   *
   * @param args args(0) is the partition day to load, formatted `yyyyMMdd`
   *             (e.g. "20201223"); typically yesterday's date.
   */
  def main(args: Array[String]): Unit = {

    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 1, s"usage: ${this.getClass.getSimpleName} <yyyyMMdd>")
    val yesterday = args(0)

    val conf = new SparkConf().setAppName(this.getClass.getSimpleName)
    val spark = SparkSession
      .builder()
      .enableHiveSupport() // required: the source table lives in the Hive metastore
      .config(conf)
      .getOrCreate()

    // Select yesterday's partition. NOTE: date-pattern letters are
    // case-sensitive in Spark SQL ('MM' = month, 'mm' = minutes); the
    // original patterns 'yyyymmdd' / 'yyyy-mm-dd' parsed and printed
    // MINUTES instead of months, producing a wrong stat_date.
    // `length(distinct_id) < 15` filters out anonymous/device ids.
    val result: DataFrame = spark.sql(
      s"""
         |SELECT 0 as id, from_unixtime(unix_timestamp('${yesterday}','yyyyMMdd'),'yyyy-MM-dd') AS stat_date, distinct_id, times, p_app_version as app_version, p_manufacturer as manufacturer, p_model as model, p_os as os, p_os_version as os_version, service_name, province, city, area, toon_type, event, ip
         |FROM ods.t_sensors_events
         |WHERE log_day='${yesterday}'
         |        AND length(distinct_id) <15
         |""".stripMargin)

    // Append the day's rows to MySQL.
    // SECURITY NOTE(review): JDBC credentials are hard-coded in source;
    // move them to a secured configuration (spark-submit --conf, a
    // credential store, or an encrypted properties file) in a follow-up.
    result.write.format("jdbc")
      .mode(SaveMode.Append)
      .option("url", "jdbc:mysql://172.28.247.156:3316/kpi_toon?serverTimezone=UTC&characterEncoding=utf-8")
      .option("dbtable", "t_sensors_events")
      .option("user", "bigdatas_rw")
      .option("password", "Pro.Bigdata0914")
      .option("driver", "com.mysql.jdbc.Driver")
      .save()

    // Release cluster resources.
    spark.stop()
  }
}
