package cn.wangjie.spark.hive

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * SparkSQL integrated with Hive: reads data from a Hive table and
 * runs analysis over it.
 *
 * Requires a running Hive MetaStore service reachable at the configured
 * thrift URI; intended to be launched locally (master = local[2]).
 */
object SparkSQLHive2 {

	def main(args: Array[String]): Unit = {

		// Build the SparkSession instance.
		val spark: SparkSession = SparkSession.builder()
			// Strip the trailing "$" that Scala appends to object class names.
			.appName(this.getClass.getSimpleName.stripSuffix("$"))
			.master("local[2]")
			// FIX: key was misspelled as "spark.sql.shuffle.patitions"; Spark
			// silently ignores unknown keys, so the default of 200 shuffle
			// partitions was used instead of the intended 4.
			.config("spark.sql.shuffle.partitions", "4")
			// Hive integration: address of the Hive MetaStore thrift service.
			.config("hive.metastore.uris", "thrift://node01:9083")
			// Enable Hive support (Hive catalog, SerDes, HiveQL).
			.enableHiveSupport()
			.getOrCreate()


		// Load the Hive table into a DataFrame.
		val empDF = spark.read.table("tra_tcc_date")
		empDF.printSchema()
		empDF.show(20, truncate = false)

		println("================================================")

		/*val resultDF: DataFrame = spark.sql(
			"""
			  |select e.ename, e.sal, d.dname from db_hive.emp e join db_hive.dept d on e.deptno = d.deptno
			  |""".stripMargin)
		resultDF.show(20, truncate = false)*/


		// Application finished; release resources.
		spark.stop()

	}

}
