package com.xiaoxu.spark.ExternalDataSource

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
  * Demo: combined querying of Hive and MySQL table data via Spark's
  * external data source (JDBC) API.
  *
  * Reads a MySQL table over JDBC, writes it into the session catalog as
  * `hive_records`, and shows both. The Hive-side join is left commented out.
  */
object HiveWithMysqlDemo {

  def main(args: Array[String]): Unit = {
    // NOTE(review): if the commented-out Hive reads below are re-enabled,
    // `.enableHiveSupport()` must be added here — confirm against the cluster setup.
    val spark = SparkSession.builder()
      .appName("HiveMySQLApp")
      .master("local[2]")
      .getOrCreate()

    try {
      // Load Hive table data
      //val hiveDF = spark.table("emp")

      // Load MySQL table data over JDBC.
      // NOTE(review): credentials and host are hard-coded — move to
      // configuration or a secret store before any non-demo use.
      val mysqlDF = spark
        .read
        .format("jdbc")
        .option("url", "jdbc:mysql://39.105.44.223:3306")
        .option("dbtable", "hive.dept")
        .option("user", "dbadmin")
        .option("password", "dbadmin")
        .option("driver", "com.mysql.jdbc.Driver")
        .load()

      mysqlDF.show()
      // Persist the JDBC result into the catalog, replacing any prior copy.
      mysqlDF.write.mode(SaveMode.Overwrite).saveAsTable("hive_records")
      // Side-effecting action: keep explicit parentheses on show().
      spark.table("hive_records").show()

      // JOIN
      /*val resultDF =
        hiveDF.join(mysqlDF, hiveDF.col("deptno") === mysqlDF.col("DEPTNO"))

      resultDF.show


      resultDF
        .select(
          hiveDF.col("empno"),
          hiveDF.col("ename"),
          mysqlDF.col("deptno"),
          mysqlDF.col("dname"))
        .show*/
    } finally {
      // Always release the SparkSession, even if a read/write above fails.
      spark.stop()
    }
  }

}
