package com.imooc.spark

import org.apache.spark.sql.SparkSession

/**
 * Demonstrates a cross-source query with Spark SQL: joins a Hive table (emp)
 * with a MySQL table (spark.DEPT) loaded through the JDBC data source.
 */
object HiveMysqlApp {
  def main(args: Array[String]): Unit = {
    // 1) Build the SparkSession. enableHiveSupport() is required so that
    // spark.table("emp") resolves against the Hive metastore catalog;
    // without it the session uses the in-memory catalog and the lookup fails.
    val spark = SparkSession
      .builder()
      .appName("HiveMysqlApp")
      .master("local[2]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // 2) Load the Hive table "emp".
      val hiveDF = spark.table("emp")

      // 3) Load the DEPT table from MySQL via the JDBC data source.
      // NOTE(review): connection credentials are hard-coded for this demo;
      // externalize them (config/args) before any real deployment.
      val mysqlDF = spark
        .read
        .format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306")
        .option("dbtable", "spark.DEPT")
        .option("user", "root")
        .option("password", "654321")
        .option("driver", "com.mysql.jdbc.Driver")
        .load()

      // 4) Inner join the two sources on department number.
      val resultDF = hiveDF.join(mysqlDF, hiveDF.col("deptno") === mysqlDF.col("deptno"))
      resultDF.show()

      // 5) Project a few columns from each side of the join.
      // show() keeps its parentheses: it is a side-effecting call.
      resultDF.select(hiveDF.col("empno"), hiveDF.col("ename"),
        mysqlDF.col("deptno"), mysqlDF.col("dname")).show()
    } finally {
      // Release the SparkSession even when a query above throws.
      spark.stop()
    }
  }
}
