package org.example.table

import cn.hutool.core.date.DateUtil
import lombok.extern.slf4j.Slf4j
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Entry point for the UAI curve DWS-table job.
 *
 * Reads job parameters from `spark.app.*` SparkConf entries (all have
 * defaults), prints the effective configuration, and runs a smoke-test
 * query (`show databases`) against Hive.
 *
 * NOTE(review): the original carried Lombok's `@Slf4j`, which is a Java
 * annotation processor and generates nothing for Scala sources; it was
 * removed because the `log` field it would declare was never referenced.
 */
object UaiCurveDwstable {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
    // Local master is hard-coded here, so a submit-time --master will be
    // overridden. NOTE(review): confirm this is intended before deploying
    // to a cluster (the commented-out args-based switch suggested it wasn't).
    val spark = SparkSession.builder()
      .appName("uaiCurveDwsTable")
      .enableHiveSupport()
      .config("spark.master", "local[*]") // local mode; [*] = all available cores
      .config(sparkConf)
      .getOrCreate()

    println("=================开始查询E_MP_U_CURVE数据===================")
    // startDate = today (beginning of day) as yyyyMMdd; endDate = yesterday.
    val startDate = DateUtil.beginOfDay(DateUtil.date).toString("yyyyMMdd")
    val endDate: String = DateUtil.format(DateUtil.offsetDay(DateUtil.date(), -1), "yyyyMMdd")

    // Job parameters, each overridable via --conf spark.app.* at submit time.
    val table = sparkConf.get("spark.app.table", "e_mp_u_curve")
    // BUG FIX: the original get("spark.app.startTime") had no default and
    // threw NoSuchElementException whenever the key was unset; restored the
    // intended default (endDate) from the adjacent commented-out line.
    val startTime = sparkConf.get("spark.app.startTime", endDate)
    val endTime = sparkConf.get("spark.app.endTime", startDate)
    val dwsMeter = sparkConf.get("spark.app.dwsMeter", "t_bus_meterpointmng")
    val dwsTable = sparkConf.get("spark.app.dwsTable", "t_obj_202312ld_hour_va")
    val url = sparkConf.get("spark.app.url", "jdbc:mysql://localhost:3306/test")
    val username = sparkConf.get("spark.app.username", "root")
    // SECURITY(review): a real-looking default password is hard-coded in
    // source — move it to a secrets store / submit-time config.
    val password = sparkConf.get("spark.app.password", "9XME3z94xs9nhCj")
    // startTime/endTime/dwsTable are read but not yet used — presumably
    // reserved for the real query; left in place so overrides are validated.

    // BUG FIX: println does not understand SLF4J "{}" placeholders — the
    // originals printed a tuple containing literal braces. Use interpolation.
    // SECURITY: never echo the actual password; mask it.
    println(s"mysql 连接的参数连接：$url,表名:$dwsMeter,用户:$username，密码：******")
    println(s"hive 参数，表名：$table,开始时间：$startDate,结束时间：$endDate")

    // Smoke-test query against the Hive metastore.
    val dataFrame = spark.sql("show databases")
    dataFrame.show()

    // Release the session so the driver JVM can exit cleanly.
    spark.stop()
  }

}
