package com.ada.spark.datasource

import java.io.File

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Demo entry point that queries Hive-backed tables through Spark SQL.
 *
 * Joins `hive.emp`, `hive.dept`, and `hive.location` on department and
 * location keys and prints the resulting employee/department/location
 * names to stdout via `show()`.
 */
object HiveTest {

    def main(args: Array[String]): Unit = {
        // Build the Spark configuration; local[*] uses all available cores.
        val conf = new SparkConf()
            .setAppName("HiveTest")
            .setMaster("local[*]")

        // Hive support is required so `hive.*` tables resolve via the metastore.
        val spark = SparkSession
            .builder()
            .config(conf)
            .enableHiveSupport()
            .getOrCreate()

        try {
            spark.sql(
                """
                  |SELECT
                  |    e.ename,
                  |    d.dname,
                  |    l.loc_name
                  |FROM
                  |    hive.emp e
                  |JOIN hive.dept d ON
                  |    d.deptno = e.deptno
                  |JOIN hive.location l ON
                  |    d.loc = l.loc
                """.stripMargin).show()
        } finally {
            // Release the SparkContext and its resources even if the query fails;
            // the original leaked the session on every run.
            spark.stop()
        }
    }
}
