package com.data.source.api

import org.apache.spark.sql.SparkSession

/**
 * Joins a Hive table (`emp`) with a MySQL table (`spark.dep`) on `deptno`
 * and persists the result back to Hive as `new_emp`.
 */
object HiveMysqlApp {
  def main(args: Array[String]): Unit = {
    // enableHiveSupport() is required so that spark.table(...) and
    // saveAsTable(...) talk to the Hive metastore instead of the default
    // in-memory catalog.
    val spark = SparkSession.builder()
      .appName("HiveMysqlApp")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Load the Hive table.
      val hiveDF = spark.table("emp")

      // Load the MySQL table via JDBC.
      // NOTE(review): credentials are hard-coded; move to configuration or
      // environment variables before running outside a local demo.
      val mysqlDF = spark.read.format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306")
        .option("dbtable", "spark.dep")
        .option("user", "root")
        .option("password", "root")
        .option("driver", "com.mysql.jdbc.Driver")
        .load()

      // USING-style join keeps a single `deptno` column in the output.
      // The original expression join (hiveDF("deptno") === mysqlDF("deptno"))
      // leaves two identically named columns, and saveAsTable then fails
      // with a duplicate-column AnalysisException.
      val joinDF = hiveDF.join(mysqlDF, Seq("deptno"))
      joinDF.show()

      // Persist the join result as a Hive table.
      joinDF.write.saveAsTable("new_emp")
    } finally {
      // Always release the SparkSession, even if a stage above throws.
      spark.stop()
    }
  }
}
