package com.xl.competition.old.task2

import org.apache.spark.sql.{DataFrame, SparkSession}

import java.util.Properties

/**
 * @author: xl
 * @createTime: 2023/11/13 22:46:42
 * @program: com.xl.competition
 * @description: Incremental extraction of the user_info table from MySQL into the ods layer in Hive
 */
object task2 {
  def main(args: Array[String]): Unit = {
    // Run Hadoop operations as root (cluster has no per-user auth configured).
    System.setProperty("HADOOP_USER_NAME", "root")

    val spark: SparkSession = SparkSession.builder()
      .appName("task2")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://master:9083")
      .getOrCreate()

    // JDBC credentials for the source MySQL instance.
    // NOTE(review): credentials are hard-coded in source; move them to a
    // config file / environment variable before this leaves a sandbox.
    val properties: Properties = new Properties()
    properties.put("user", "root")
    properties.put("password", "Abc123..")

    // Full snapshot read of the MySQL source table shtd_store.user_info.
    val userInfo: DataFrame = spark.read.jdbc("jdbc:mysql://192.168.0.1:3306/shtd_store", "user_info", properties)
    userInfo.createTempView("temp_user_info")

    // Incremental append: keep only rows whose latest timestamp is newer than
    // the latest timestamp already loaded into ods.user_info.
    //  - greatest(c1, c2) replaces the original invalid max(c1, c2): max is a
    //    one-column aggregate, greatest compares columns row-wise.
    //  - the undefined identifier `a` is replaced by the scalar subquery the
    //    original comment described ("max(operate_time, create_time) in ods").
    //  - partition(etl_date = '20231112') replaces partition("20231112"),
    //    which is not valid HiveQL (a static partition spec needs col='value').
    // NOTE(review): partition column assumed to be etl_date — confirm against
    // the ods.user_info DDL.
    spark.sql(
      """
        |insert into table ods.user_info partition(etl_date = '20231112')
        |select * from temp_user_info
        |where greatest(operate_time, create_time) >
        |      (select max(greatest(operate_time, create_time)) from ods.user_info)
        |""".stripMargin)

    // The original second spark.sql call submitted an empty string, which
    // throws a ParseException at runtime — removed.

    spark.stop()
  }
}
