package com.bigdata.lk

import org.apache.spark.sql.SparkSession

/**
 * One-shot ETL job: reads the `ORDERS` table from MySQL over JDBC and
 * appends it into the Hive table `ods.orders_lk` under a date partition.
 *
 * Usage: SparkHiveConn [etldate]
 *   etldate — partition value in yyyyMMdd form; defaults to "20230414"
 *             (the original hard-coded value) when no argument is given.
 */
object SparkHiveConn {

  def main(args: Array[String]): Unit = {

    // Partition date is now parameterized; absent an argument the job
    // behaves exactly as before.
    val etlDate = if (args.nonEmpty) args(0) else "20230414"

    // Hive-enabled session; warehouse dir points at the HDFS Hive warehouse.
    val spark = SparkSession.builder()
      .appName("SparkHiveConn")
      .config("spark.sql.warehouse.dir", "hdfs://master:9000/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    try {
      val url = "jdbc:mysql://192.168.23.51/shtd_store?useSSL=false"

      // NOTE(review): credentials are hard-coded — move to a config file,
      // environment variables, or --conf job options before production use.
      // `spark.read` replaces the deprecated `spark.sqlContext.read`.
      val jdbcDF = spark.read.format("jdbc").options(
        Map(
          "url" -> url,
          "user" -> "root",
          "password" -> "123456",
          "dbtable" -> "ORDERS"
        )
      ).load()

      // A single temp view suffices; the original round-trip through a
      // second identical view (`select * from ORDERS_mysql`) added nothing.
      jdbcDF.createOrReplaceTempView("ORDERS_mysql_temp")

      // Target database for the insert below.
      spark.sql("use ods")

      // Append the JDBC snapshot into the dated Hive partition.
      spark.sql(
        s"""
           |insert into table orders_lk
           |partition(etldate='$etlDate')
           |select
           |*
           |from ORDERS_mysql_temp
           |""".stripMargin)
    } finally {
      // Always release cluster resources, even if the read/insert fails.
      spark.stop()
    }
  }

}
