package com.hive.dim

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Entry point for the area-code dimension job: opens a local, Hive-enabled
 * SparkSession and (for now) smoke-tests metastore connectivity by listing
 * the Hive databases. The actual dimension ETL is still to be written.
 */
object dimAreaCode {
    def main(args: Array[String]): Unit = {

        // Impersonate "root" so HDFS writes from a local dev machine are not
        // rejected by Hadoop's user-name check.
        System.setProperty("HADOOP_USER_NAME", "root")

        // Local-mode session with Hive support so SQL statements go through
        // the Hive metastore configured on the classpath (hive-site.xml).
        // NOTE(review): the original config key was just "spark", which Spark
        // stores but never reads. The value "hdfs://ns" is an HDFS nameservice
        // URI, so the intended key is almost certainly
        // "spark.hadoop.fs.defaultFS" — confirm against the cluster config.
        val sparkSession = SparkSession
          .builder()
          .appName("HiveDim")
          .master("local[*]")
          .enableHiveSupport()
          .config("spark.hadoop.fs.defaultFS", "hdfs://ns")
          .getOrCreate()

        try {
            // Smoke test: verifies the Hive metastore is reachable.
            // `show()` keeps its parens — it is a side-effecting call.
            sparkSession.sql("show databases").show()
        } finally {
            // Always release the session (and its SparkContext), even if the
            // query above throws — the original leaked the context on exit.
            sparkSession.stop()
        }

        // TODO: dimension-table ETL starts here.
    }
}